Dec 04 09:38:19 crc systemd[1]: Starting Kubernetes Kubelet... Dec 04 09:38:19 crc restorecon[4697]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 09:38:19 crc 
restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 09:38:19 crc 
restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc 
restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc 
restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:19 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 09:38:20 
crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 
09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 
09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 09:38:20 crc 
restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 
09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 
09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc 
restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:20 crc restorecon[4697]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 04 09:38:20 crc restorecon[4697]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 04 09:38:20 crc kubenswrapper[4707]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 04 09:38:20 crc kubenswrapper[4707]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 04 09:38:20 crc kubenswrapper[4707]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 04 09:38:20 crc kubenswrapper[4707]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 04 09:38:20 crc kubenswrapper[4707]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 04 09:38:20 crc kubenswrapper[4707]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.682053 4707 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685107 4707 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685131 4707 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685138 4707 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685143 4707 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685148 4707 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685153 4707 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685157 4707 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685161 4707 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685166 4707 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685170 4707 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685175 4707 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685179 4707 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685184 4707 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685188 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685192 4707 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685195 4707 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685203 4707 feature_gate.go:330] unrecognized feature gate: Example Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685207 4707 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685211 4707 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685214 4707 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 04 09:38:20 crc kubenswrapper[4707]: 
W1204 09:38:20.685218 4707 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685221 4707 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685224 4707 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685228 4707 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685232 4707 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685235 4707 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685239 4707 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685242 4707 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685245 4707 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685249 4707 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685253 4707 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685257 4707 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685260 4707 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685264 4707 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685268 4707 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685271 4707 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685275 4707 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685279 4707 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685282 4707 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685287 4707 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685292 4707 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685297 4707 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685301 4707 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685305 4707 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685309 4707 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685313 4707 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685316 4707 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685320 4707 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685323 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685327 4707 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685331 4707 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685355 4707 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685361 4707 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685365 4707 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685368 4707 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685373 4707 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685376 4707 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685380 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685383 4707 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685386 4707 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685390 4707 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685393 4707 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685397 4707 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685400 4707 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685404 4707 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685408 4707 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685411 4707 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685415 4707 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 04 09:38:20 crc 
kubenswrapper[4707]: W1204 09:38:20.685420 4707 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685424 4707 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.685428 4707 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685753 4707 flags.go:64] FLAG: --address="0.0.0.0" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685764 4707 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685774 4707 flags.go:64] FLAG: --anonymous-auth="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685780 4707 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685785 4707 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685789 4707 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685796 4707 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685802 4707 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685807 4707 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685812 4707 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685817 4707 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685822 4707 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685827 4707 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685831 4707 flags.go:64] FLAG: --cgroup-root="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685835 4707 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685840 4707 flags.go:64] FLAG: --client-ca-file="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685844 4707 flags.go:64] FLAG: --cloud-config="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685848 4707 flags.go:64] FLAG: --cloud-provider="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685852 4707 flags.go:64] FLAG: --cluster-dns="[]" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685857 4707 flags.go:64] FLAG: --cluster-domain="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685861 4707 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685865 4707 flags.go:64] FLAG: --config-dir="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685869 4707 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685873 4707 flags.go:64] FLAG: --container-log-max-files="5" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685878 4707 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685883 4707 flags.go:64] FLAG: 
--container-runtime-endpoint="/var/run/crio/crio.sock" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685887 4707 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685891 4707 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685895 4707 flags.go:64] FLAG: --contention-profiling="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685899 4707 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685903 4707 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685907 4707 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685911 4707 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685916 4707 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685920 4707 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685924 4707 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685928 4707 flags.go:64] FLAG: --enable-load-reader="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685933 4707 flags.go:64] FLAG: --enable-server="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685937 4707 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685944 4707 flags.go:64] FLAG: --event-burst="100" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685948 4707 flags.go:64] FLAG: --event-qps="50" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685952 4707 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685956 4707 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685961 4707 flags.go:64] FLAG: --eviction-hard="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685966 4707 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685970 4707 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685974 4707 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685978 4707 flags.go:64] FLAG: --eviction-soft="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685982 4707 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685986 4707 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685990 4707 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685994 4707 flags.go:64] FLAG: --experimental-mounter-path="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.685999 4707 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686003 4707 flags.go:64] FLAG: --fail-swap-on="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686007 4707 flags.go:64] FLAG: --feature-gates="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686011 4707 
flags.go:64] FLAG: --file-check-frequency="20s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686016 4707 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686020 4707 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686025 4707 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686029 4707 flags.go:64] FLAG: --healthz-port="10248" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686033 4707 flags.go:64] FLAG: --help="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686037 4707 flags.go:64] FLAG: --hostname-override="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686041 4707 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686045 4707 flags.go:64] FLAG: --http-check-frequency="20s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686049 4707 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686053 4707 flags.go:64] FLAG: --image-credential-provider-config="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686057 4707 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686061 4707 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686065 4707 flags.go:64] FLAG: --image-service-endpoint="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686069 4707 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686073 4707 flags.go:64] FLAG: --kube-api-burst="100" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686077 4707 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686081 4707 flags.go:64] FLAG: --kube-api-qps="50" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686085 4707 flags.go:64] FLAG: --kube-reserved="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686090 4707 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686094 4707 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686098 4707 flags.go:64] FLAG: --kubelet-cgroups="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686102 4707 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686106 4707 flags.go:64] FLAG: --lock-file="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686110 4707 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686114 4707 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686118 4707 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686124 4707 flags.go:64] FLAG: --log-json-split-stream="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686128 4707 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686132 4707 flags.go:64] FLAG: --log-text-split-stream="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686136 4707 flags.go:64] FLAG: 
--logging-format="text" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686140 4707 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686145 4707 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686149 4707 flags.go:64] FLAG: --manifest-url="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686153 4707 flags.go:64] FLAG: --manifest-url-header="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686159 4707 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686163 4707 flags.go:64] FLAG: --max-open-files="1000000" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686168 4707 flags.go:64] FLAG: --max-pods="110" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686172 4707 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686176 4707 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686180 4707 flags.go:64] FLAG: --memory-manager-policy="None" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686184 4707 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686188 4707 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686193 4707 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686199 4707 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686212 4707 flags.go:64] FLAG: --node-status-max-images="50" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686216 4707 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686222 4707 flags.go:64] FLAG: --oom-score-adj="-999" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686226 4707 flags.go:64] FLAG: --pod-cidr="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686231 4707 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686240 4707 flags.go:64] FLAG: --pod-manifest-path="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686245 4707 flags.go:64] FLAG: --pod-max-pids="-1" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686249 4707 flags.go:64] FLAG: --pods-per-core="0" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686254 4707 flags.go:64] FLAG: --port="10250" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686259 4707 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686264 4707 flags.go:64] FLAG: --provider-id="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686269 4707 flags.go:64] FLAG: --qos-reserved="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686273 4707 flags.go:64] FLAG: --read-only-port="10255" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686278 4707 flags.go:64] FLAG: --register-node="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686282 4707 flags.go:64] FLAG: 
--register-schedulable="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686287 4707 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686294 4707 flags.go:64] FLAG: --registry-burst="10" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686298 4707 flags.go:64] FLAG: --registry-qps="5" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686302 4707 flags.go:64] FLAG: --reserved-cpus="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686306 4707 flags.go:64] FLAG: --reserved-memory="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686311 4707 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686316 4707 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686320 4707 flags.go:64] FLAG: --rotate-certificates="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686324 4707 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686328 4707 flags.go:64] FLAG: --runonce="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686352 4707 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686358 4707 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686364 4707 flags.go:64] FLAG: --seccomp-default="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686369 4707 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686374 4707 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686378 4707 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686386 4707 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686391 4707 flags.go:64] FLAG: --storage-driver-password="root" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686395 4707 flags.go:64] FLAG: --storage-driver-secure="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686399 4707 flags.go:64] FLAG: --storage-driver-table="stats" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686403 4707 flags.go:64] FLAG: --storage-driver-user="root" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686407 4707 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686411 4707 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686415 4707 flags.go:64] FLAG: --system-cgroups="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686419 4707 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686426 4707 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686430 4707 flags.go:64] FLAG: --tls-cert-file="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686434 4707 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686439 4707 flags.go:64] FLAG: --tls-min-version="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686443 4707 flags.go:64] 
FLAG: --tls-private-key-file="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686447 4707 flags.go:64] FLAG: --topology-manager-policy="none" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686451 4707 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686455 4707 flags.go:64] FLAG: --topology-manager-scope="container" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686459 4707 flags.go:64] FLAG: --v="2" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686464 4707 flags.go:64] FLAG: --version="false" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686471 4707 flags.go:64] FLAG: --vmodule="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686479 4707 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686483 4707 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686591 4707 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686596 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686601 4707 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686605 4707 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686608 4707 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686612 4707 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686616 4707 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686619 4707 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686623 4707 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686627 4707 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686635 4707 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686638 4707 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686642 4707 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686646 4707 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686650 4707 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
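
The flags.go:64 records above dump every kubelet flag together with its effective value (FLAG: --name="value"); the unrecognized-feature-gate warnings then resume below. A sketch, under the same placeholder-path assumption as the snippet further up, that folds that dump into a dictionary so individual settings such as --system-reserved or --node-labels can be looked up directly.

import re

# Matches the dump records, e.g. 'flags.go:64] FLAG: --cgroup-driver="cgroupfs"'
FLAG_RECORD = re.compile(r'FLAG: (--[\w-]+)="(.*?)"')

def flag_dump(text: str) -> dict:
    """Return {flag: value} from the flags.go:64 dump in a kubelet log."""
    return {m.group(1): m.group(2) for m in FLAG_RECORD.finditer(text)}

# Worked against one record copied from this log:
sample = 'flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"'
assert flag_dump(sample) == {"--system-reserved": "cpu=200m,ephemeral-storage=350Mi,memory=350Mi"}
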
Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686655 4707 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686659 4707 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686663 4707 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686667 4707 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686670 4707 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686674 4707 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686677 4707 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686680 4707 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686684 4707 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686687 4707 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686691 4707 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686694 4707 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686697 4707 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686701 4707 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686705 4707 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686708 4707 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686712 4707 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686715 4707 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686719 4707 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686722 4707 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686726 4707 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686729 4707 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686732 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686736 4707 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686740 4707 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686744 4707 feature_gate.go:330] unrecognized feature gate: 
VSphereDriverConfiguration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686749 4707 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686755 4707 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686759 4707 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686764 4707 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686768 4707 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686773 4707 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686778 4707 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686782 4707 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686786 4707 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686790 4707 feature_gate.go:330] unrecognized feature gate: Example Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686794 4707 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686797 4707 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686801 4707 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686805 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686809 4707 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686813 4707 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686816 4707 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686821 4707 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686825 4707 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686829 4707 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686833 4707 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686836 4707 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686842 4707 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686846 4707 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686850 4707 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686853 4707 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686857 4707 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686860 4707 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686863 4707 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.686867 4707 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.686878 4707 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.697633 4707 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.697686 4707 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697813 4707 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697827 4707 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697838 4707 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697846 4707 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697855 4707 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697865 4707 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697873 4707 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697882 4707 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 04 09:38:20 crc kubenswrapper[4707]: 
W1204 09:38:20.697890 4707 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697900 4707 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697909 4707 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697919 4707 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697932 4707 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697941 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697950 4707 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697961 4707 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697969 4707 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697977 4707 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697986 4707 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.697994 4707 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698005 4707 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698014 4707 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698021 4707 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698029 4707 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698037 4707 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698045 4707 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698052 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698060 4707 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698068 4707 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698075 4707 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698084 4707 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698095 4707 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698105 4707 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698113 4707 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698122 4707 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698131 4707 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698139 4707 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698147 4707 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698157 4707 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698165 4707 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698172 4707 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698180 4707 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698188 4707 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698195 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698203 4707 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698211 4707 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698219 4707 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698227 4707 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698234 4707 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698242 4707 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698250 4707 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698258 4707 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698265 4707 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698273 4707 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698280 4707 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698290 4707 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
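
The long runs of feature_gate.go:330 warnings in this block, like the equivalent blocks above and below, are the kubelet rejecting OpenShift-specific gate names it does not recognize; the same gate list is re-parsed several times during start-up, which is why the block repeats almost verbatim. A small sketch that tallies the distinct gate names and how often each warning recurs, again assuming the capture has been saved to a local file.

import re
from collections import Counter

# Matches e.g. 'feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy'
UNRECOGNIZED = re.compile(r"unrecognized feature gate: (\w+)")

def unrecognized_gates(text: str) -> Counter:
    """Count how many times each unknown gate name is reported."""
    return Counter(UNRECOGNIZED.findall(text))

# For this capture the counter would list each OpenShift gate name
# (AdminNetworkPolicy, GatewayAPI, NewOLM, ...) with one hit per parse pass.
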
Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698300 4707 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698310 4707 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698318 4707 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698327 4707 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698363 4707 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698372 4707 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698380 4707 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698389 4707 feature_gate.go:330] unrecognized feature gate: Example Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698396 4707 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698404 4707 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698412 4707 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698420 4707 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698428 4707 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698436 4707 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698443 4707 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.698457 4707 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698678 4707 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698691 4707 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698699 4707 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698707 4707 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698714 4707 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698722 4707 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698732 4707 feature_gate.go:330] 
unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698743 4707 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698753 4707 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698764 4707 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698773 4707 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698783 4707 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698796 4707 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698810 4707 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698820 4707 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698833 4707 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698843 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698853 4707 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698863 4707 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698872 4707 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698882 4707 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698891 4707 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698901 4707 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698911 4707 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698922 4707 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698933 4707 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698943 4707 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698953 4707 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698962 4707 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698972 4707 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698981 4707 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.698992 4707 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather 
Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699004 4707 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699014 4707 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699023 4707 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699031 4707 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699040 4707 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699051 4707 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699061 4707 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699070 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699080 4707 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699091 4707 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699100 4707 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699110 4707 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699123 4707 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
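
Each warning block closes with a feature_gate.go:386 summary of the gates the kubelet actually applied, printed in Go map syntax; two such records appear above (at 09:38:20.686878 and 09:38:20.698457) and a third follows below. A sketch that converts that {map[Name:bool ...]} rendering into a Python dictionary of booleans.

import re

# Matches 'feature gates: {map[CloudDualStackNodeIPs:true ... VolumeAttributesClass:false]}'
GATE_SUMMARY = re.compile(r"feature gates: \{map\[(.*?)\]\}")

def applied_gates(text: str) -> dict:
    """Parse the last 'feature gates: {map[...]}' record into {gate: enabled}."""
    matches = GATE_SUMMARY.findall(text)
    if not matches:
        return {}
    gates = {}
    for pair in matches[-1].split():
        name, _, value = pair.partition(":")
        gates[name] = (value == "true")
    return gates

# Against the summaries in this log the result would include, for example,
# {"CloudDualStackNodeIPs": True, "KMSv1": True, "NodeSwap": False, ...}
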
Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699138 4707 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699150 4707 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699163 4707 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699173 4707 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699183 4707 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699193 4707 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699203 4707 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699213 4707 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699222 4707 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699231 4707 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699240 4707 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699252 4707 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699264 4707 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699276 4707 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699287 4707 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699297 4707 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699307 4707 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699317 4707 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699329 4707 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699368 4707 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699377 4707 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699385 4707 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699392 4707 feature_gate.go:330] unrecognized feature gate: Example Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699400 4707 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699408 4707 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.699415 4707 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.699428 4707 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.699959 4707 server.go:940] "Client rotation is on, will bootstrap in background" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.705861 4707 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.706019 4707 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
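
The certificate-manager records that follow report the client certificate's expiration (2026-02-24 05:52:08 UTC), the rotation deadline the kubelet picked (2026-01-17 07:09:46 UTC), and the resulting wait of roughly 1053h31m. A sketch that re-derives that wait from the timestamps printed in those records, using the whole-second values only.

from datetime import datetime, timezone

# Timestamps as printed in the certificate_manager.go records below.
expiration = datetime(2026, 2, 24, 5, 52, 8, tzinfo=timezone.utc)   # certificate expiration
deadline = datetime(2026, 1, 17, 7, 9, 46, tzinfo=timezone.utc)     # chosen rotation deadline
logged_at = datetime(2025, 12, 4, 9, 38, 20, tzinfo=timezone.utc)   # journal timestamp of the record

wait = deadline - logged_at
hours, rest = divmod(int(wait.total_seconds()), 3600)
minutes, seconds = divmod(rest, 60)
# The wait works out to 1053h31m26s, i.e. the whole-second part of the logged
# "Waiting 1053h31m26.076444696s for next certificate rotation".
print(f"expires {expiration:%Y-%m-%d %H:%M:%S} UTC; next rotation in {hours}h{minutes}m{seconds}s")
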
Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.706888 4707 server.go:997] "Starting client certificate rotation" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.706927 4707 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.707172 4707 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-17 07:09:46.783705476 +0000 UTC Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.707263 4707 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1053h31m26.076444696s for next certificate rotation Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.715939 4707 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.718550 4707 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.727940 4707 log.go:25] "Validated CRI v1 runtime API" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.750672 4707 log.go:25] "Validated CRI v1 image API" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.753199 4707 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.756576 4707 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-04-09-34-45-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.756617 4707 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.770385 4707 manager.go:217] Machine: {Timestamp:2025-12-04 09:38:20.769260684 +0000 UTC m=+0.205083211 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654116352 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:eae3f82d-6a5b-493a-a51e-c8ee4e7acba2 BootID:3ceb1816-cf72-4648-bec6-4ad3a2135d2a Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108168 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827056128 Type:vfs Inodes:4108168 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 
Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:e2:70:30 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:e2:70:30 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:61:d7:5f Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:2a:d7:0e Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:d7:d9:17 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:66:42:e3 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:9a:24:ba:ab:27:bf Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:c6:0e:04:1e:3f:79 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654116352 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: 
DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.770660 4707 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.770786 4707 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.771438 4707 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.771612 4707 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.771639 4707 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.771871 4707 topology_manager.go:138] "Creating topology manager with none policy" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.771889 4707 container_manager_linux.go:303] "Creating device plugin manager" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.772080 4707 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.772108 4707 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 04 
09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.772355 4707 state_mem.go:36] "Initialized new in-memory state store" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.772477 4707 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.773445 4707 kubelet.go:418] "Attempting to sync node with API server" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.773467 4707 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.773492 4707 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.773507 4707 kubelet.go:324] "Adding apiserver pod source" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.773522 4707 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.776642 4707 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.777296 4707 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.778878 4707 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.779921 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.779986 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.780005 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.780022 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.780048 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.780157 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.780179 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.780210 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.780231 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.780249 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.780269 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.780287 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.780379 4707 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.781779 4707 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get 
"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.180:6443: connect: connection refused Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.781921 4707 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.180:6443: connect: connection refused" logger="UnhandledError" Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.781805 4707 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.180:6443: connect: connection refused Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.781989 4707 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.180:6443: connect: connection refused" logger="UnhandledError" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.782134 4707 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.180:6443: connect: connection refused Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.782541 4707 server.go:1280] "Started kubelet" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.782707 4707 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.782839 4707 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.783596 4707 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.784928 4707 server.go:460] "Adding debug handlers to kubelet server" Dec 04 09:38:20 crc systemd[1]: Started Kubernetes Kubelet. 
Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.786056 4707 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.786111 4707 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.786680 4707 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.786708 4707 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.786834 4707 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.786866 4707 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 14:46:27.408803208 +0000 UTC Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.786937 4707 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.787753 4707 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" interval="200ms" Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.787831 4707 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.180:6443: connect: connection refused Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.787927 4707 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.180:6443: connect: connection refused" logger="UnhandledError" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.790572 4707 factory.go:55] Registering systemd factory Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.790605 4707 factory.go:221] Registration of the systemd container factory successfully Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.794052 4707 factory.go:153] Registering CRI-O factory Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.794092 4707 factory.go:221] Registration of the crio container factory successfully Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.793490 4707 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.180:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187df9a1712631de default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 09:38:20.782481886 +0000 UTC m=+0.218304433,LastTimestamp:2025-12-04 09:38:20.782481886 +0000 UTC m=+0.218304433,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 04 09:38:20 crc 
kubenswrapper[4707]: I1204 09:38:20.794213 4707 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.794262 4707 factory.go:103] Registering Raw factory Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.794285 4707 manager.go:1196] Started watching for new ooms in manager Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.795132 4707 manager.go:319] Starting recovery of all containers Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.805783 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.805865 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.805892 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.805909 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.805931 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.805947 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.805962 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.805984 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806015 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806040 4707 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806058 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806081 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806096 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806121 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806139 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806159 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806205 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806220 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806239 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806254 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806273 4707 reconstruct.go:130] "Volume is marked 
as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806286 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806301 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806324 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806366 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806393 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806444 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806473 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806496 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806520 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806541 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806596 4707 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806616 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806634 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806659 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806678 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806706 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806725 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806746 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806770 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806790 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806818 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806837 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806857 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806886 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806910 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806935 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806956 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806975 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.806998 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807018 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807042 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807072 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807104 4707 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807131 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807162 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807183 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807204 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807233 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807251 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807270 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807300 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807321 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807413 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807436 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807457 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807484 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807507 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807536 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807557 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807579 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807605 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807627 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807655 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807673 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807692 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807715 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807733 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807755 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807778 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807799 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807840 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807861 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807886 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807909 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807929 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807954 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807973 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.807998 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.808020 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.808041 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.808069 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.808089 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.808112 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.808131 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.808154 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810565 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810640 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810665 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810686 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810710 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810731 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810753 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810773 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810813 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810840 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810866 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810891 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810928 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810951 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810971 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.810995 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811016 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811037 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811057 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811077 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811099 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811118 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811140 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811161 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811184 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811204 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811226 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811247 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811268 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811289 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811311 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811331 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811375 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811396 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811416 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811436 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811456 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811476 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811496 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811516 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811536 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811556 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811576 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811596 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811616 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811634 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811656 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811686 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811706 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811728 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811748 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811768 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811788 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811807 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811826 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811847 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811875 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811902 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811933 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811961 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.811990 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.812016 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.812036 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.812059 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.812082 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.812105 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.812125 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.812146 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.812168 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.812190 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.812211 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813195 4707 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813246 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813271 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813296 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813316 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813364 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813392 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813425 4707 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813447 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813468 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813489 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813510 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813532 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813554 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813580 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813609 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813637 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813669 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813694 4707 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813717 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813737 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813759 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813783 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813803 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813824 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813843 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813863 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813883 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813903 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813925 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813945 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813967 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.813991 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.814012 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.814035 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.814059 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.814079 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.814101 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.814123 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.814142 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.814162 4707 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.814180 4707 reconstruct.go:97] "Volume reconstruction finished" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.814194 4707 reconciler.go:26] "Reconciler: start to sync state" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.820125 4707 manager.go:324] Recovery completed Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.831048 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.833078 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.833121 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.833134 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.833999 4707 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.834028 4707 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.834055 4707 state_mem.go:36] "Initialized new in-memory state store" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.839759 4707 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.843482 4707 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.843587 4707 policy_none.go:49] "None policy: Start" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.843658 4707 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.843698 4707 kubelet.go:2335] "Starting kubelet main sync loop" Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.843764 4707 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.844720 4707 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.844762 4707 state_mem.go:35] "Initializing new in-memory state store" Dec 04 09:38:20 crc kubenswrapper[4707]: W1204 09:38:20.844755 4707 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.180:6443: connect: connection refused Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.844815 4707 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.180:6443: connect: connection refused" logger="UnhandledError" Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.887840 4707 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.893248 4707 manager.go:334] "Starting Device Plugin manager" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.893317 4707 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.893349 4707 server.go:79] "Starting device plugin registration server" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.893828 4707 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.893854 4707 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.894267 4707 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.894432 4707 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.894446 4707 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.905456 4707 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.944111 4707 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 04 09:38:20 crc kubenswrapper[4707]: 
I1204 09:38:20.944253 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.945731 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.945782 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.945794 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.945916 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.946158 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.946238 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.947066 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.947117 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.947130 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.947370 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.947499 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.947525 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.947535 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.947560 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.947624 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.948481 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.948567 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.948579 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.948742 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.948927 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.948964 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.949472 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.949493 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.949502 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.950072 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.950100 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.950109 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.950143 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.950178 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.950197 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.950228 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.950276 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.950306 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.951266 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.951274 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.951304 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.951323 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.951308 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.951397 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.951563 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.951601 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.953170 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.953201 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.953213 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.989213 4707 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" interval="400ms" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.994966 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.996932 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.997002 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.997015 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:20 crc kubenswrapper[4707]: I1204 09:38:20.997051 4707 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 09:38:20 crc kubenswrapper[4707]: E1204 09:38:20.997663 4707 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.180:6443: connect: connection refused" node="crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.016397 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.016470 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.016609 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.016645 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.016770 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.016841 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.016881 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.016926 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.016992 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.017064 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.017100 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.017132 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.017166 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.017197 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.017227 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118396 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118429 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118535 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118566 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118592 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118613 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118637 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118658 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118683 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118705 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118732 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118756 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118779 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118801 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118866 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.118888 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119284 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119388 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119429 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119467 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119502 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119542 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119578 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119588 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119645 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119617 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119693 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119724 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119761 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.119778 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.198422 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.200129 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.200170 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.200180 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.200203 4707 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 09:38:21 crc kubenswrapper[4707]: E1204 09:38:21.200727 4707 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.180:6443: connect: connection refused" node="crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.280491 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.291800 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: W1204 09:38:21.307081 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-be4c7d15e6ecde974599c14da6ddaf67b35d1b6e62542a97345489b16732f449 WatchSource:0}: Error finding container be4c7d15e6ecde974599c14da6ddaf67b35d1b6e62542a97345489b16732f449: Status 404 returned error can't find the container with id be4c7d15e6ecde974599c14da6ddaf67b35d1b6e62542a97345489b16732f449 Dec 04 09:38:21 crc kubenswrapper[4707]: W1204 09:38:21.313325 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-466e98171317a7c90c36d27cece8e78310e3665ba5a751a7b580bfa274c127e5 WatchSource:0}: Error finding container 466e98171317a7c90c36d27cece8e78310e3665ba5a751a7b580bfa274c127e5: Status 404 returned error can't find the container with id 466e98171317a7c90c36d27cece8e78310e3665ba5a751a7b580bfa274c127e5 Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.314265 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: W1204 09:38:21.337389 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-90837a1d8c4d5751bd245521f70261bf44f4d8bcd074cc85289369f8db74625e WatchSource:0}: Error finding container 90837a1d8c4d5751bd245521f70261bf44f4d8bcd074cc85289369f8db74625e: Status 404 returned error can't find the container with id 90837a1d8c4d5751bd245521f70261bf44f4d8bcd074cc85289369f8db74625e Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.338640 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.343808 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:21 crc kubenswrapper[4707]: W1204 09:38:21.361027 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-ededa93363e00f0cea11dfc1974aae1ac56b18de60e811103a3c285293970daa WatchSource:0}: Error finding container ededa93363e00f0cea11dfc1974aae1ac56b18de60e811103a3c285293970daa: Status 404 returned error can't find the container with id ededa93363e00f0cea11dfc1974aae1ac56b18de60e811103a3c285293970daa Dec 04 09:38:21 crc kubenswrapper[4707]: W1204 09:38:21.363568 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-ce8e4af3f48a5b097fb9e202793a02ea2207af35f8ab82af717b9bf9704cc575 WatchSource:0}: Error finding container ce8e4af3f48a5b097fb9e202793a02ea2207af35f8ab82af717b9bf9704cc575: Status 404 returned error can't find the container with id ce8e4af3f48a5b097fb9e202793a02ea2207af35f8ab82af717b9bf9704cc575 Dec 04 09:38:21 crc kubenswrapper[4707]: E1204 09:38:21.389794 4707 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" interval="800ms" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.601842 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.603140 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.603178 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.603186 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.603207 4707 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 09:38:21 crc kubenswrapper[4707]: E1204 09:38:21.603652 4707 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.180:6443: connect: connection refused" node="crc" Dec 04 09:38:21 crc kubenswrapper[4707]: W1204 09:38:21.664150 4707 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.180:6443: connect: connection refused Dec 04 09:38:21 crc kubenswrapper[4707]: E1204 09:38:21.664956 4707 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.180:6443: connect: connection refused" logger="UnhandledError" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.783695 4707 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": 
dial tcp 38.102.83.180:6443: connect: connection refused Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.786981 4707 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 18:02:35.560332647 +0000 UTC Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.787071 4707 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 536h24m13.773265427s for next certificate rotation Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.851556 4707 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172" exitCode=0 Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.851635 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172"} Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.851812 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ededa93363e00f0cea11dfc1974aae1ac56b18de60e811103a3c285293970daa"} Dec 04 09:38:21 crc kubenswrapper[4707]: E1204 09:38:21.851783 4707 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.180:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187df9a1712631de default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 09:38:20.782481886 +0000 UTC m=+0.218304433,LastTimestamp:2025-12-04 09:38:20.782481886 +0000 UTC m=+0.218304433,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.851964 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.853182 4707 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea" exitCode=0 Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.853257 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea"} Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.853317 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"90837a1d8c4d5751bd245521f70261bf44f4d8bcd074cc85289369f8db74625e"} Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.853513 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.853551 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.853583 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.853599 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.854573 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.854622 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.854639 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.855249 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.856117 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.856130 4707 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43" exitCode=0 Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.856153 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.856193 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43"} Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.856233 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.856262 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"466e98171317a7c90c36d27cece8e78310e3665ba5a751a7b580bfa274c127e5"} Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.856384 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.857787 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.857831 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.857850 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.858537 4707 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="2f0c5e69da81dfa9a99ca0ad0cff506f4411721c4057a99f5532a7a6a87b243d" exitCode=0 Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.858624 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"2f0c5e69da81dfa9a99ca0ad0cff506f4411721c4057a99f5532a7a6a87b243d"} Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.858660 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"be4c7d15e6ecde974599c14da6ddaf67b35d1b6e62542a97345489b16732f449"} Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.858735 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.859757 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.859794 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.859810 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.860309 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899"} Dec 04 09:38:21 crc kubenswrapper[4707]: I1204 09:38:21.860375 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ce8e4af3f48a5b097fb9e202793a02ea2207af35f8ab82af717b9bf9704cc575"} Dec 04 09:38:22 crc kubenswrapper[4707]: W1204 09:38:22.080591 4707 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.180:6443: connect: connection refused Dec 04 09:38:22 crc kubenswrapper[4707]: E1204 09:38:22.080687 4707 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.180:6443: connect: connection refused" logger="UnhandledError" Dec 04 09:38:22 crc kubenswrapper[4707]: W1204 09:38:22.088673 4707 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.180:6443: connect: connection refused Dec 04 09:38:22 crc kubenswrapper[4707]: E1204 09:38:22.088755 4707 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.180:6443: connect: connection refused" logger="UnhandledError" Dec 04 09:38:22 crc kubenswrapper[4707]: W1204 09:38:22.112134 4707 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.180:6443: connect: connection refused Dec 04 09:38:22 crc kubenswrapper[4707]: E1204 09:38:22.112246 4707 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.180:6443: connect: connection refused" logger="UnhandledError" Dec 04 09:38:22 crc kubenswrapper[4707]: E1204 09:38:22.190400 4707 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" interval="1.6s" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.404889 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.412779 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.413110 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.413460 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.413569 4707 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.866180 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.866226 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.866237 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.866267 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.867304 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.867366 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.867379 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.870791 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.870846 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.870861 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.870871 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.870881 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.870977 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.871717 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.871747 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.871761 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.872594 4707 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7" exitCode=0 Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.872654 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.872770 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.873582 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.873625 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.873635 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.876146 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0ac30713fd3273b83ee133e02d9575ce884e1732adf4b612d134c208fd9cbf0b"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.876184 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e160007086212e3c54a49bdfcd7a43776b81c7561b18d963f0667181e2238c08"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.876202 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b948fa6cd68a641df1c78251110c4e49e944be9affe87715c5b80be44f60ca00"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.876291 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.877075 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.877102 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.877113 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.878636 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"b2544d1a5b5d9a189ba22315acf681b65c61c2c8cfc584cd489b60ec924a87d8"} Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.878779 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.879577 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.879609 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:22 crc kubenswrapper[4707]: I1204 09:38:22.879621 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.205074 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.884106 4707 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742" exitCode=0 Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.884234 4707 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.884271 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.884279 4707 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.884281 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742"} Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.884366 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.884399 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.884474 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.885489 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.885527 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.885542 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.885860 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.885897 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.885908 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.885906 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.885940 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.885958 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.886587 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.886634 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:23 crc kubenswrapper[4707]: I1204 09:38:23.886654 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:24 crc kubenswrapper[4707]: I1204 09:38:24.893065 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707"} Dec 04 09:38:24 crc kubenswrapper[4707]: I1204 09:38:24.893127 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791"} Dec 04 09:38:24 crc kubenswrapper[4707]: I1204 09:38:24.893149 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568"} Dec 04 09:38:24 crc kubenswrapper[4707]: I1204 09:38:24.893166 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3"} Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.519461 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.519755 4707 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.519811 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.521321 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.521384 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.521394 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.868922 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.869177 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.870815 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.870855 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.870866 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.902312 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a"} Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.902480 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.903603 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.903647 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:25 crc kubenswrapper[4707]: I1204 09:38:25.903659 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:26 crc kubenswrapper[4707]: I1204 09:38:26.905265 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:26 crc kubenswrapper[4707]: I1204 09:38:26.906381 4707 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:26 crc kubenswrapper[4707]: I1204 09:38:26.906431 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:26 crc kubenswrapper[4707]: I1204 09:38:26.906463 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:28 crc kubenswrapper[4707]: I1204 09:38:28.869417 4707 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 04 09:38:28 crc kubenswrapper[4707]: I1204 09:38:28.869498 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 09:38:29 crc kubenswrapper[4707]: I1204 09:38:29.201190 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 04 09:38:29 crc kubenswrapper[4707]: I1204 09:38:29.201443 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:29 crc kubenswrapper[4707]: I1204 09:38:29.202818 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:29 crc kubenswrapper[4707]: I1204 09:38:29.202892 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:29 crc kubenswrapper[4707]: I1204 09:38:29.202911 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:29 crc kubenswrapper[4707]: I1204 09:38:29.860318 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 09:38:29 crc kubenswrapper[4707]: I1204 09:38:29.860631 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:29 crc kubenswrapper[4707]: I1204 09:38:29.862045 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:29 crc kubenswrapper[4707]: I1204 09:38:29.862132 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:29 crc kubenswrapper[4707]: I1204 09:38:29.862167 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:30 crc kubenswrapper[4707]: I1204 09:38:30.390293 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 04 09:38:30 crc kubenswrapper[4707]: I1204 09:38:30.390576 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:30 crc kubenswrapper[4707]: I1204 09:38:30.392028 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:30 crc kubenswrapper[4707]: I1204 09:38:30.392084 4707 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:30 crc kubenswrapper[4707]: I1204 09:38:30.392114 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:30 crc kubenswrapper[4707]: I1204 09:38:30.440789 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:30 crc kubenswrapper[4707]: I1204 09:38:30.441047 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:30 crc kubenswrapper[4707]: I1204 09:38:30.442655 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:30 crc kubenswrapper[4707]: I1204 09:38:30.442689 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:30 crc kubenswrapper[4707]: I1204 09:38:30.442699 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:30 crc kubenswrapper[4707]: E1204 09:38:30.905595 4707 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 04 09:38:31 crc kubenswrapper[4707]: I1204 09:38:31.062465 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:31 crc kubenswrapper[4707]: I1204 09:38:31.062612 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:31 crc kubenswrapper[4707]: I1204 09:38:31.063814 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:31 crc kubenswrapper[4707]: I1204 09:38:31.063874 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:31 crc kubenswrapper[4707]: I1204 09:38:31.063889 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:31 crc kubenswrapper[4707]: I1204 09:38:31.557445 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:31 crc kubenswrapper[4707]: I1204 09:38:31.919088 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:31 crc kubenswrapper[4707]: I1204 09:38:31.920694 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:31 crc kubenswrapper[4707]: I1204 09:38:31.920754 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:31 crc kubenswrapper[4707]: I1204 09:38:31.920774 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:32 crc kubenswrapper[4707]: I1204 09:38:32.073843 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:32 crc kubenswrapper[4707]: I1204 09:38:32.080955 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:32 crc kubenswrapper[4707]: E1204 
09:38:32.416613 4707 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 04 09:38:32 crc kubenswrapper[4707]: I1204 09:38:32.784357 4707 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 04 09:38:32 crc kubenswrapper[4707]: I1204 09:38:32.920862 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:32 crc kubenswrapper[4707]: I1204 09:38:32.923761 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:32 crc kubenswrapper[4707]: I1204 09:38:32.924203 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:32 crc kubenswrapper[4707]: I1204 09:38:32.924830 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:32 crc kubenswrapper[4707]: I1204 09:38:32.927266 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:33 crc kubenswrapper[4707]: W1204 09:38:33.495508 4707 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 04 09:38:33 crc kubenswrapper[4707]: I1204 09:38:33.495627 4707 trace.go:236] Trace[1791762783]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Dec-2025 09:38:23.493) (total time: 10002ms): Dec 04 09:38:33 crc kubenswrapper[4707]: Trace[1791762783]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (09:38:33.495) Dec 04 09:38:33 crc kubenswrapper[4707]: Trace[1791762783]: [10.002081538s] [10.002081538s] END Dec 04 09:38:33 crc kubenswrapper[4707]: E1204 09:38:33.495657 4707 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 04 09:38:33 crc kubenswrapper[4707]: I1204 09:38:33.701115 4707 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Dec 04 09:38:33 crc kubenswrapper[4707]: I1204 09:38:33.701184 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with 
statuscode: 403" Dec 04 09:38:33 crc kubenswrapper[4707]: I1204 09:38:33.709515 4707 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Dec 04 09:38:33 crc kubenswrapper[4707]: I1204 09:38:33.709611 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 04 09:38:33 crc kubenswrapper[4707]: I1204 09:38:33.923969 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:33 crc kubenswrapper[4707]: I1204 09:38:33.925185 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:33 crc kubenswrapper[4707]: I1204 09:38:33.925238 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:33 crc kubenswrapper[4707]: I1204 09:38:33.925259 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:34 crc kubenswrapper[4707]: I1204 09:38:34.017405 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:34 crc kubenswrapper[4707]: I1204 09:38:34.018699 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:34 crc kubenswrapper[4707]: I1204 09:38:34.018766 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:34 crc kubenswrapper[4707]: I1204 09:38:34.018784 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:34 crc kubenswrapper[4707]: I1204 09:38:34.018822 4707 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 09:38:34 crc kubenswrapper[4707]: I1204 09:38:34.926799 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:34 crc kubenswrapper[4707]: I1204 09:38:34.928002 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:34 crc kubenswrapper[4707]: I1204 09:38:34.928064 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:34 crc kubenswrapper[4707]: I1204 09:38:34.928074 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:35 crc kubenswrapper[4707]: I1204 09:38:35.530258 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:35 crc kubenswrapper[4707]: I1204 09:38:35.530550 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:35 crc kubenswrapper[4707]: I1204 
09:38:35.531869 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:35 crc kubenswrapper[4707]: I1204 09:38:35.531968 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:35 crc kubenswrapper[4707]: I1204 09:38:35.532041 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:35 crc kubenswrapper[4707]: I1204 09:38:35.536536 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:35 crc kubenswrapper[4707]: I1204 09:38:35.929372 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:35 crc kubenswrapper[4707]: I1204 09:38:35.930549 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:35 crc kubenswrapper[4707]: I1204 09:38:35.930620 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:35 crc kubenswrapper[4707]: I1204 09:38:35.930634 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.586097 4707 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.693010 4707 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.694267 4707 trace.go:236] Trace[419992061]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Dec-2025 09:38:25.271) (total time: 13423ms): Dec 04 09:38:38 crc kubenswrapper[4707]: Trace[419992061]: ---"Objects listed" error: 13423ms (09:38:38.694) Dec 04 09:38:38 crc kubenswrapper[4707]: Trace[419992061]: [13.423175453s] [13.423175453s] END Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.694289 4707 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.694482 4707 trace.go:236] Trace[1316457869]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Dec-2025 09:38:24.267) (total time: 14427ms): Dec 04 09:38:38 crc kubenswrapper[4707]: Trace[1316457869]: ---"Objects listed" error: 14427ms (09:38:38.694) Dec 04 09:38:38 crc kubenswrapper[4707]: Trace[1316457869]: [14.42713423s] [14.42713423s] END Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.694492 4707 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.695968 4707 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.696774 4707 trace.go:236] Trace[1629077956]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Dec-2025 09:38:24.266) (total time: 14430ms): Dec 04 09:38:38 crc kubenswrapper[4707]: Trace[1629077956]: ---"Objects listed" error: 14430ms (09:38:38.696) Dec 04 09:38:38 crc kubenswrapper[4707]: Trace[1629077956]: [14.430190236s] [14.430190236s] 
END Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.696792 4707 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.729643 4707 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:52820->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.729747 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:52820->192.168.126.11:17697: read: connection reset by peer" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.730190 4707 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.730248 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.765432 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.770698 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.783709 4707 apiserver.go:52] "Watching apiserver" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.787189 4707 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.787427 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"] Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.787793 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.787818 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.787868 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.787888 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.787933 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.787995 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.788170 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.788204 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.788283 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.790937 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.791172 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.791387 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.791472 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.791766 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.791797 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.792765 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.792782 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.793139 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.820592 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.832269 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.843684 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.864425 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.880752 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\
"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.887509 4707 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.893418 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897315 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897389 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897411 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897448 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897468 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897487 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897521 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897538 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897556 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897604 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897627 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897647 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897688 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897710 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897729 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897768 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897858 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897888 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897871 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897914 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.897929 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898019 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898063 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898065 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898096 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898130 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898158 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898191 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898270 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898300 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898357 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898393 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898403 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898424 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898503 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898547 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898582 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898586 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898637 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898669 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898716 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898742 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898782 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898804 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898825 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898675 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.898869 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.899088 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.899152 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.899250 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.899553 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.899558 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.899567 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.899571 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.899790 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.899928 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.900017 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.900058 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.900196 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.900306 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.900469 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.901150 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.901187 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.901234 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.901259 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.901536 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.901619 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.901728 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.902079 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.902097 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.902193 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.902652 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.902695 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.902958 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.902999 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903003 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903103 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903057 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903148 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903169 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903185 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903196 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903254 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903284 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903309 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903356 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903382 4707 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903408 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903431 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903454 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903479 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903504 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903527 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903552 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903578 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903599 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903622 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" 
(UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903647 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903670 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903695 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903724 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903747 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903771 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903794 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903816 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903838 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903859 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903880 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903903 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903926 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903990 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904012 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904035 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904056 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904079 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904104 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 09:38:38 
crc kubenswrapper[4707]: I1204 09:38:38.904127 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904149 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904181 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904204 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904227 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904255 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904278 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904304 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904328 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904369 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: 
\"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904399 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904423 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904446 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904470 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904493 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904517 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904539 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904562 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904589 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904615 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod 
\"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904641 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904688 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904714 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904741 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904766 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904794 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904819 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904843 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904870 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904894 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod 
\"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904917 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904941 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904967 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904991 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905014 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905036 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905059 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905080 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905105 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905130 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905153 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905175 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905197 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905220 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905243 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905267 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905291 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905316 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905357 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905380 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" 
(UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905403 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905429 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905456 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905484 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905511 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905540 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905563 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905588 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905612 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905636 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" 
(UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905659 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905683 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905709 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905733 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905759 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905783 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905808 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905832 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905859 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905884 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905907 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905932 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905957 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905981 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906003 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906030 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906053 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906076 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906099 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906133 4707 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906208 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906245 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906281 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906315 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906542 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906592 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906642 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906674 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906709 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906741 4707 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906772 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906810 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906844 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906875 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906908 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903690 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.903848 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904163 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904226 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904262 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904607 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.904827 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905083 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905257 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.908619 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905427 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905610 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905825 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.905899 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906186 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906219 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906270 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906574 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906588 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.906649 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.907021 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.907260 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.907286 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.907409 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.907470 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.907510 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.907583 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:38:39.407551209 +0000 UTC m=+18.843373726 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.907755 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.907818 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.907904 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.907917 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.908073 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.908357 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.908840 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.908822 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.908884 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.909057 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.909089 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.909119 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.909408 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.910296 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.910412 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.910427 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.909551 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.907070 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.909567 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.909714 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.910126 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.910200 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.911070 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.912187 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.912538 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.912604 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.912739 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.912824 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913106 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913166 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913204 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913229 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913259 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913299 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913323 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913397 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913425 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913444 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: 
"metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913448 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913484 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913507 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913598 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913624 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913648 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913666 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913690 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913745 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913776 
4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913799 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913823 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913848 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913871 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913892 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913911 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913931 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913951 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: 
\"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913976 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914001 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914021 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914048 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914166 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914180 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914193 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914207 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914222 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914239 4707 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914256 4707 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914269 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914284 4707 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914297 4707 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914366 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914377 4707 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914416 4707 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914433 4707 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914484 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914501 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914537 4707 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914550 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914562 4707 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914577 4707 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914589 4707 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914601 4707 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914639 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914658 4707 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914672 4707 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914703 4707 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914714 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914724 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914736 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914752 4707 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914764 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc 
kubenswrapper[4707]: I1204 09:38:38.914774 4707 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914785 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914794 4707 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914804 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914814 4707 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914825 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914834 4707 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914844 4707 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914867 4707 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914877 4707 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914890 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.915573 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916164 4707 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913547 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916194 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913599 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916233 4707 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913795 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913942 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.913688 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914313 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916202 4707 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914411 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916352 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916375 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914473 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916394 4707 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916415 4707 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914747 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916430 4707 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916443 4707 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916458 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916472 4707 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914791 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914842 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914867 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.915054 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.915061 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916486 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916567 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916582 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916593 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916875 4707 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916890 4707 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916903 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916931 4707 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916942 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916952 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916963 4707 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.915151 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.917130 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.917137 4707 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.915199 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.915367 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.917227 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.917257 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:39.41722756 +0000 UTC m=+18.853050247 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.915285 4707 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.917292 4707 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.915603 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.915671 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.915749 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.915998 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.914230 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916822 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.916843 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.917375 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:39.417356574 +0000 UTC m=+18.853179291 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.915555 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.917481 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.917506 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.917818 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.917842 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.917871 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.918125 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.918458 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.918965 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.919087 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.919240 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.919404 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.918324 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.919578 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.919724 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.919911 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.920274 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.920460 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.920936 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.921443 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.921821 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.921872 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.921884 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.922431 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.922795 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.922888 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.923994 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.924000 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.924298 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.924509 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.924533 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.925021 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.925037 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.925048 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.925055 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.925533 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.926933 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.927240 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.927534 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.930401 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.932589 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.934106 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.934269 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.936823 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.943864 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.944199 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.944224 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.944237 4707 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.944311 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:39.44426888 +0000 UTC m=+18.880091387 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.954995 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.955801 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.956855 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.957646 4707 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d" exitCode=255 Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.957859 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d"} Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.958264 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.959592 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.959667 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.959887 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.959963 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.960180 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.960246 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.960475 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.960734 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.960950 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.963812 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.971898 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.972399 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.972442 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.972568 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.973013 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.973056 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.973072 4707 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.973139 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:39.473115987 +0000 UTC m=+18.908938494 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:38 crc kubenswrapper[4707]: E1204 09:38:38.973294 4707 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.978558 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.980797 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.981005 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.981791 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.985433 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.985588 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.985712 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.985893 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.985484 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.987107 4707 scope.go:117] "RemoveContainer" containerID="426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.987519 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.987878 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.990758 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.991080 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.991167 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.993427 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.993565 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.993674 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.997943 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.998356 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.998426 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.998461 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.998832 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 09:38:38 crc kubenswrapper[4707]: I1204 09:38:38.998928 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.008375 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020186 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020667 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020745 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020754 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020841 4707 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020853 4707 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020863 4707 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020872 4707 reconciler_common.go:293] "Volume detached for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020880 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020888 4707 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020896 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020905 4707 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020915 4707 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020923 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020931 4707 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020939 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020948 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020956 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020963 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020971 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020981 4707 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath 
\"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020989 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.020997 4707 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021007 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021015 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021024 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021024 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021032 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021081 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021096 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021109 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021132 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021147 4707 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021159 4707 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021171 4707 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021183 4707 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021196 4707 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021208 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021220 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021234 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021246 4707 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021259 4707 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021292 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021305 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021316 4707 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021349 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021360 4707 reconciler_common.go:293] "Volume 
detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021372 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021384 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021398 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021410 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021424 4707 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021437 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021450 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021463 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021474 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021485 4707 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021497 4707 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021508 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021521 4707 reconciler_common.go:293] "Volume detached 
for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021532 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021543 4707 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021554 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021566 4707 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021577 4707 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021588 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021601 4707 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021613 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021627 4707 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021638 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021649 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021661 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021673 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021684 4707 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021695 4707 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021707 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021718 4707 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021730 4707 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021741 4707 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021753 4707 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021764 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021774 4707 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021785 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021796 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021808 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021819 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node 
\"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021830 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021839 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021849 4707 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021863 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021875 4707 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021888 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021913 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021923 4707 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021934 4707 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021961 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021972 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021984 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.021993 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022003 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022012 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022023 4707 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022033 4707 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022043 4707 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022052 4707 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022063 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022073 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022083 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022109 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022120 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022130 4707 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022140 4707 reconciler_common.go:293] "Volume detached for volume \"serviceca\" 
(UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022150 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022162 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022174 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022184 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022196 4707 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022207 4707 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022217 4707 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022228 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022239 4707 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022250 4707 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022261 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022272 4707 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022283 4707 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" 
(UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022294 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022306 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022319 4707 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022354 4707 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022368 4707 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.022380 4707 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.023132 4707 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.026278 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.034227 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.034650 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.036412 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.039585 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.049847 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.061632 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.073211 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.086221 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.101784 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.102819 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.112241 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.112435 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.118229 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 09:38:39 crc kubenswrapper[4707]: W1204 09:38:39.119666 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-4c9f32ca2fd0deeb492576c4c135853f808dbb699b4bb5cbbdb6547f3d0bc20b WatchSource:0}: Error finding container 4c9f32ca2fd0deeb492576c4c135853f808dbb699b4bb5cbbdb6547f3d0bc20b: Status 404 returned error can't find the container with id 4c9f32ca2fd0deeb492576c4c135853f808dbb699b4bb5cbbdb6547f3d0bc20b Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.122791 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.122815 4707 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.122827 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.122838 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.239949 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.259673 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.259772 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.262073 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.269276 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.284618 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.298621 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.318569 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04
T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.359659 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.392795 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.426685 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.426804 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.426869 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:38:40.426818464 +0000 UTC m=+19.862640971 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.426886 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.426946 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.426969 4707 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.427057 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:40.427031 +0000 UTC m=+19.862853517 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.427070 4707 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.427115 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:40.427108343 +0000 UTC m=+19.862930850 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.442077 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.454482 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.467653 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.483697 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.508815 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true
,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5
deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.524557 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-che
ck-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.527866 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.527920 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.528077 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 
09:38:39.528097 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.528107 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.528147 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.528113 4707 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.528166 4707 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.528232 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:40.528209996 +0000 UTC m=+19.964032673 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.528256 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:40.528246487 +0000 UTC m=+19.964069224 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.543307 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287fa
af92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.556906 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.569130 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.814247 4707 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.844564 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:39 crc kubenswrapper[4707]: I1204 09:38:39.844577 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.844722 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:39 crc kubenswrapper[4707]: E1204 09:38:39.844882 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.434561 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.434689 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.434737 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.434837 4707 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.434895 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:42.434879565 +0000 UTC m=+21.870702072 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.434955 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:38:42.434946817 +0000 UTC m=+21.870769324 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.435017 4707 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.435045 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:42.43503757 +0000 UTC m=+21.870860077 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.460625 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169"} Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.460683 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f46c06150e3ec7fd3c744b82a322995efaf27ab33d5f2b96582a0b0a9ad23c32"} Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.468259 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d7cd8e5032e9c40e3d7212f1f695429f84a4058c1675b5ac8198b9ebd4459497"} Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.470246 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027"} Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.470278 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400"} Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.470293 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4c9f32ca2fd0deeb492576c4c135853f808dbb699b4bb5cbbdb6547f3d0bc20b"} Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.472884 4707 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.475662 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276"} Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.475704 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.485816 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.491909 4707 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.509038 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.535964 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.536100 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.536254 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.536310 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.536356 4707 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.536437 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:42.536409891 +0000 UTC m=+21.972232428 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.536808 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.536841 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.536853 4707 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.536889 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:42.536876996 +0000 UTC m=+21.972699683 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.601403 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.623637 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04
T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.639204 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.654294 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.671485 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.691907 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.710511 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.726369 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.739395 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.762051 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8
aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.784899 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.801941 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.822693 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.836654 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.844138 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:40 crc kubenswrapper[4707]: E1204 09:38:40.844287 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.847785 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.848267 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.849575 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.850153 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.851115 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.851638 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.852205 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 04 09:38:40 crc 
kubenswrapper[4707]: I1204 09:38:40.853104 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.853707 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.854597 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.855159 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.856212 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.856675 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.857149 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.858107 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.858626 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.859540 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.859924 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.860501 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.863287 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.863612 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.863872 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.865040 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.865640 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.866866 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.867355 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.868041 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.869966 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.870535 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.871528 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.872005 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.872851 4707 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.872949 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.874758 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.875880 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.876512 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.878360 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.878659 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.879175 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.880227 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.881007 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.882096 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.882693 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.883758 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.884319 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.885275 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.885737 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.886602 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.887106 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.888135 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.888657 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.889460 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.889899 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.890748 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.891273 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.891737 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.895840 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.921689 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.936776 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.949292 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.966183 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:40 crc kubenswrapper[4707]: I1204 09:38:40.986040 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd36
24366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd581263
76edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.009521 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.029491 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.047525 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.221568 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-2wjkm"] Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.221935 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-2wjkm" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.225528 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.225614 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.225656 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.264415 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.286754 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.312122 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.337327 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.342440 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e90cc2b8-9e61-4b1c-9344-7561316fa30a-hosts-file\") pod \"node-resolver-2wjkm\" (UID: \"e90cc2b8-9e61-4b1c-9344-7561316fa30a\") " pod="openshift-dns/node-resolver-2wjkm" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.342514 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwr4b\" (UniqueName: \"kubernetes.io/projected/e90cc2b8-9e61-4b1c-9344-7561316fa30a-kube-api-access-bwr4b\") pod \"node-resolver-2wjkm\" (UID: \"e90cc2b8-9e61-4b1c-9344-7561316fa30a\") " pod="openshift-dns/node-resolver-2wjkm" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.368610 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9
0092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.389947 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.401496 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.410930 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.422827 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.434790 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.443777 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e90cc2b8-9e61-4b1c-9344-7561316fa30a-hosts-file\") pod \"node-resolver-2wjkm\" (UID: \"e90cc2b8-9e61-4b1c-9344-7561316fa30a\") " pod="openshift-dns/node-resolver-2wjkm" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.443845 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwr4b\" (UniqueName: \"kubernetes.io/projected/e90cc2b8-9e61-4b1c-9344-7561316fa30a-kube-api-access-bwr4b\") pod \"node-resolver-2wjkm\" (UID: \"e90cc2b8-9e61-4b1c-9344-7561316fa30a\") " pod="openshift-dns/node-resolver-2wjkm" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.443907 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e90cc2b8-9e61-4b1c-9344-7561316fa30a-hosts-file\") pod \"node-resolver-2wjkm\" (UID: \"e90cc2b8-9e61-4b1c-9344-7561316fa30a\") " pod="openshift-dns/node-resolver-2wjkm" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.460232 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwr4b\" (UniqueName: \"kubernetes.io/projected/e90cc2b8-9e61-4b1c-9344-7561316fa30a-kube-api-access-bwr4b\") pod \"node-resolver-2wjkm\" (UID: \"e90cc2b8-9e61-4b1c-9344-7561316fa30a\") " pod="openshift-dns/node-resolver-2wjkm" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.535116 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-2wjkm" Dec 04 09:38:41 crc kubenswrapper[4707]: W1204 09:38:41.553293 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode90cc2b8_9e61_4b1c_9344_7561316fa30a.slice/crio-99c202af500f515caa7317071e3233aea1521430b4afd7cd53758da7a8d3e50a WatchSource:0}: Error finding container 99c202af500f515caa7317071e3233aea1521430b4afd7cd53758da7a8d3e50a: Status 404 returned error can't find the container with id 99c202af500f515caa7317071e3233aea1521430b4afd7cd53758da7a8d3e50a Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.844869 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:41 crc kubenswrapper[4707]: E1204 09:38:41.845076 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:41 crc kubenswrapper[4707]: I1204 09:38:41.845554 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:41 crc kubenswrapper[4707]: E1204 09:38:41.845613 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.056450 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-c244z"] Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.056922 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.058866 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.058920 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.058975 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.059108 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.061433 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.079078 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.090912 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.102366 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.114733 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.127877 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.140089 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.154194 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-proxy-tls\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.154243 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-mcd-auth-proxy-config\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.154270 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-rootfs\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.154312 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjwl7\" (UniqueName: \"kubernetes.io/projected/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-kube-api-access-cjwl7\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.160409 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\
\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o:/
/5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.172488 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.185941 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.202112 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.216131 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.224273 4707 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.226627 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.226678 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.226691 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.226785 4707 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.232715 4707 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.233079 4707 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.234180 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.234224 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.234234 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.234252 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.234264 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.253812 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.254922 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-proxy-tls\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " 
pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.254973 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-mcd-auth-proxy-config\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.255002 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-rootfs\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.255057 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjwl7\" (UniqueName: \"kubernetes.io/projected/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-kube-api-access-cjwl7\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.255238 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-rootfs\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.256766 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-mcd-auth-proxy-config\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.258822 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.258857 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.258870 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.258894 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.258908 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.260047 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-proxy-tls\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.271730 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjwl7\" (UniqueName: \"kubernetes.io/projected/e64897e0-4162-4aa8-9c13-8a4262a3ca3d-kube-api-access-cjwl7\") pod \"machine-config-daemon-c244z\" (UID: \"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\") " pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.273541 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.278069 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.278126 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.278138 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.278159 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.278171 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.295298 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.298635 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.298699 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.298722 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.298740 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.298752 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.314715 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.318195 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.318241 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.318255 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.318271 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.318283 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.331974 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.332118 4707 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.333594 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.333641 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.333651 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.333665 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.333673 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.369181 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:38:42 crc kubenswrapper[4707]: W1204 09:38:42.386293 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode64897e0_4162_4aa8_9c13_8a4262a3ca3d.slice/crio-dfd091cb8d74e2c5f227927364811ae0ee28108ecc6b4908a883a876970ef127 WatchSource:0}: Error finding container dfd091cb8d74e2c5f227927364811ae0ee28108ecc6b4908a883a876970ef127: Status 404 returned error can't find the container with id dfd091cb8d74e2c5f227927364811ae0ee28108ecc6b4908a883a876970ef127 Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.433476 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-bk2sb"] Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.434034 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.435470 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.435506 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.435521 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.435537 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.435541 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6nd57"] Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.435549 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.436413 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.436646 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-npc85"] Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.437142 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.437461 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.437779 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.438029 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.438175 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.438381 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.440606 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.440887 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.441289 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.441430 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.441294 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.441859 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.442419 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.443540 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.445660 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.452156 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.456306 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.456430 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:38:46.456411227 +0000 UTC m=+25.892233734 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.456462 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.456517 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.456609 4707 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.456741 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:46.456705775 +0000 UTC m=+25.892528282 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.456610 4707 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.456807 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:46.456795068 +0000 UTC m=+25.892617575 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.468059 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\
\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.479807 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2wjkm" event={"ID":"e90cc2b8-9e61-4b1c-9344-7561316fa30a","Type":"ContainerStarted","Data":"be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8"} Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.480047 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2wjkm" event={"ID":"e90cc2b8-9e61-4b1c-9344-7561316fa30a","Type":"ContainerStarted","Data":"99c202af500f515caa7317071e3233aea1521430b4afd7cd53758da7a8d3e50a"} Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.480952 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"dfd091cb8d74e2c5f227927364811ae0ee28108ecc6b4908a883a876970ef127"} Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.482769 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.483236 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b"} Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.495370 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.505971 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.519638 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.531958 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.538150 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.538194 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.538206 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.538226 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.538240 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.545446 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558490 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-var-lib-kubelet\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558543 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-systemd\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558565 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-cnibin\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558594 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-os-release\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558611 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-daemon-config\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558631 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-netns\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558648 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-run-netns\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558666 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-env-overrides\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558688 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfcrd\" (UniqueName: \"kubernetes.io/projected/a3e11cde-e689-4b58-b238-08e945d8de0b-kube-api-access-kfcrd\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558708 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-var-lib-openvswitch\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558729 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558764 4707 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-slash\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558783 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-var-lib-cni-multus\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558801 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-conf-dir\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558833 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-etc-kubernetes\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558854 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a3e11cde-e689-4b58-b238-08e945d8de0b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558873 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-kubelet\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558890 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-var-lib-cni-bin\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558910 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-os-release\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558928 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f472b\" (UniqueName: \"kubernetes.io/projected/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-kube-api-access-f472b\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 
09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558946 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-system-cni-dir\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558964 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-cni-dir\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.558986 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-ovn-kubernetes\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559008 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-cnibin\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559037 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559058 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-netd\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559076 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r6rx\" (UniqueName: \"kubernetes.io/projected/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-kube-api-access-4r6rx\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559099 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-run-k8s-cni-cncf-io\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559118 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-run-multus-certs\") pod \"multus-npc85\" (UID: 
\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559142 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-socket-dir-parent\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559173 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-system-cni-dir\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559203 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a3e11cde-e689-4b58-b238-08e945d8de0b-cni-binary-copy\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559236 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-bin\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559277 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-config\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559303 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovn-node-metrics-cert\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559352 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-systemd-units\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559374 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-ovn\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559403 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559428 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-etc-openvswitch\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.559481 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-openvswitch\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.559778 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.559800 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.559813 4707 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.559857 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:46.559841552 +0000 UTC m=+25.995664059 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.560014 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.560043 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.560055 4707 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.560106 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:46.56009258 +0000 UTC m=+25.995915087 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.560154 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-script-lib\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.560183 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-hostroot\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.560213 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-node-log\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.560268 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-log-socket\") pod \"ovnkube-node-6nd57\" (UID: 
\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.560287 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-cni-binary-copy\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.560304 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.561861 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.581053 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.593726 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.607655 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.624231 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.639665 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.641951 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.642114 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.642189 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.642295 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.642439 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.654061 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661035 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-var-lib-kubelet\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661311 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-systemd\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661461 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-cnibin\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661572 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-cnibin\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661212 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-var-lib-kubelet\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661584 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-os-release\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661695 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-netns\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661388 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-systemd\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661720 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-run-netns\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661747 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-daemon-config\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661757 4707 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-netns\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661806 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-run-netns\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661836 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-var-lib-openvswitch\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661880 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661911 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-env-overrides\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661914 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-var-lib-openvswitch\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661939 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfcrd\" (UniqueName: \"kubernetes.io/projected/a3e11cde-e689-4b58-b238-08e945d8de0b-kube-api-access-kfcrd\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661957 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.661968 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-var-lib-cni-multus\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 
09:38:42.661995 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-conf-dir\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662036 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-slash\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662061 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-etc-kubernetes\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662085 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a3e11cde-e689-4b58-b238-08e945d8de0b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662110 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-kubelet\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662132 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-var-lib-cni-bin\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662156 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-os-release\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662180 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-ovn-kubernetes\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662212 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f472b\" (UniqueName: \"kubernetes.io/projected/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-kube-api-access-f472b\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662250 4707 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-system-cni-dir\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662273 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-cni-dir\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662314 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-netd\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662351 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r6rx\" (UniqueName: \"kubernetes.io/projected/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-kube-api-access-4r6rx\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662375 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-cnibin\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662398 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-run-multus-certs\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662430 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-run-k8s-cni-cncf-io\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662468 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-socket-dir-parent\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662492 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-system-cni-dir\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662515 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/a3e11cde-e689-4b58-b238-08e945d8de0b-cni-binary-copy\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662540 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-systemd-units\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662545 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-env-overrides\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662566 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-ovn\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662592 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-bin\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662598 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-daemon-config\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662616 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-config\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662651 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovn-node-metrics-cert\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662663 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-etc-kubernetes\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662703 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-etc-openvswitch\") pod \"ovnkube-node-6nd57\" (UID: 
\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662725 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-openvswitch\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662750 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-script-lib\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662775 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-node-log\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662801 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-hostroot\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662820 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-var-lib-cni-multus\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662826 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-cni-binary-copy\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662870 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662897 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-log-socket\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.662981 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-log-socket\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663017 
4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-conf-dir\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663051 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-slash\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663296 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663403 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a3e11cde-e689-4b58-b238-08e945d8de0b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663440 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-kubelet\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663464 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-var-lib-cni-bin\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663504 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-os-release\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663526 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-ovn-kubernetes\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663549 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-cnibin\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663573 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" 
(UniqueName: \"kubernetes.io/configmap/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-cni-binary-copy\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663634 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-system-cni-dir\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663634 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-systemd-units\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663663 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-ovn\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663705 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-bin\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663751 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a3e11cde-e689-4b58-b238-08e945d8de0b-cni-binary-copy\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663765 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-cni-dir\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663791 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-run-multus-certs\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663792 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-netd\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663829 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-host-run-k8s-cni-cncf-io\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 
09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663873 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-multus-socket-dir-parent\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663905 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a3e11cde-e689-4b58-b238-08e945d8de0b-system-cni-dir\") pod \"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663944 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-openvswitch\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663975 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-etc-openvswitch\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.663999 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-node-log\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.664319 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-config\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.664367 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-hostroot\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.664513 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-script-lib\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.665111 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-os-release\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.672760 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovn-node-metrics-cert\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.678506 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.682106 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r6rx\" (UniqueName: \"kubernetes.io/projected/e9d3467a-1f4a-4d54-97b3-c7fd062eff13-kube-api-access-4r6rx\") pod \"multus-npc85\" (UID: \"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\") " pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.684788 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f472b\" (UniqueName: \"kubernetes.io/projected/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-kube-api-access-f472b\") pod \"ovnkube-node-6nd57\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.685889 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfcrd\" (UniqueName: \"kubernetes.io/projected/a3e11cde-e689-4b58-b238-08e945d8de0b-kube-api-access-kfcrd\") pod 
\"multus-additional-cni-plugins-bk2sb\" (UID: \"a3e11cde-e689-4b58-b238-08e945d8de0b\") " pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.691387 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.702140 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.713550 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.727320 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.744642 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.744679 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.744688 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.744706 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.744716 4707 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.750581 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.753101 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.756169 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.762967 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: W1204 09:38:42.769148 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda3e11cde_e689_4b58_b238_08e945d8de0b.slice/crio-3ecbef4972d78ca72ea507dbd73fd518c16725bf0df5bc96b2c9d6a0757d088e WatchSource:0}: Error finding container 3ecbef4972d78ca72ea507dbd73fd518c16725bf0df5bc96b2c9d6a0757d088e: Status 404 returned error can't find the container with id 3ecbef4972d78ca72ea507dbd73fd518c16725bf0df5bc96b2c9d6a0757d088e Dec 04 09:38:42 crc kubenswrapper[4707]: W1204 09:38:42.769952 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5eddef4f_ede7_4ed6_8b03_acd65c4b9bdb.slice/crio-b4924668838c4191ef62e7e1dc1e72fcb7e8f31b607e0fa28ae52470fe6c3cc1 WatchSource:0}: Error finding container 
b4924668838c4191ef62e7e1dc1e72fcb7e8f31b607e0fa28ae52470fe6c3cc1: Status 404 returned error can't find the container with id b4924668838c4191ef62e7e1dc1e72fcb7e8f31b607e0fa28ae52470fe6c3cc1 Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.780108 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-npc85" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.786053 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.809846 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.822690 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.837384 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:42Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.844286 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:42 crc kubenswrapper[4707]: E1204 09:38:42.844440 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.847143 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.847168 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.847178 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.847284 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.847372 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.952400 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.952696 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.952707 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.952722 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:42 crc kubenswrapper[4707]: I1204 09:38:42.952732 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:42Z","lastTransitionTime":"2025-12-04T09:38:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.055552 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.055811 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.055890 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.055967 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.056040 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:43Z","lastTransitionTime":"2025-12-04T09:38:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.158973 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.159255 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.159379 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.159473 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.159544 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:43Z","lastTransitionTime":"2025-12-04T09:38:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.267787 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.267816 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.267824 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.267837 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.267847 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:43Z","lastTransitionTime":"2025-12-04T09:38:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.370676 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.370718 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.370729 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.370743 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.370754 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:43Z","lastTransitionTime":"2025-12-04T09:38:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.473257 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.473780 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.473798 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.473819 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.473834 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:43Z","lastTransitionTime":"2025-12-04T09:38:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.489235 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c" exitCode=0 Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.489315 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.489406 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"b4924668838c4191ef62e7e1dc1e72fcb7e8f31b607e0fa28ae52470fe6c3cc1"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.494258 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" event={"ID":"a3e11cde-e689-4b58-b238-08e945d8de0b","Type":"ContainerStarted","Data":"c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.494304 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" event={"ID":"a3e11cde-e689-4b58-b238-08e945d8de0b","Type":"ContainerStarted","Data":"3ecbef4972d78ca72ea507dbd73fd518c16725bf0df5bc96b2c9d6a0757d088e"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.502087 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.502154 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.504581 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-npc85" event={"ID":"e9d3467a-1f4a-4d54-97b3-c7fd062eff13","Type":"ContainerStarted","Data":"04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.504614 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-npc85" event={"ID":"e9d3467a-1f4a-4d54-97b3-c7fd062eff13","Type":"ContainerStarted","Data":"dccaad66b1bbc977d20714ad9c8f88673df43738add422b40ed87b14e8f17431"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.514067 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.532444 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.550652 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.574754 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkub
e-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574
53265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.577020 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.577045 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.577054 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.577067 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.577077 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:43Z","lastTransitionTime":"2025-12-04T09:38:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.588597 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.603774 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.616381 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 
2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.648664 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"1
92.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.672401 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\
\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.679600 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.679633 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.679642 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.679656 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.679666 4707 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:43Z","lastTransitionTime":"2025-12-04T09:38:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.694852 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.709906 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.723721 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.735728 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.748403 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.766265 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.779625 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.781715 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 
09:38:43.781751 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.781761 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.781777 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.781788 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:43Z","lastTransitionTime":"2025-12-04T09:38:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.792083 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"en
v-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.805057 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.815865 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.828152 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.844225 4707 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.844301 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:43 crc kubenswrapper[4707]: E1204 09:38:43.844366 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:43 crc kubenswrapper[4707]: E1204 09:38:43.844462 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.846350 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"moun
tPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.859188 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.873519 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.886574 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.887925 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.888003 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.888018 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.888036 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.888056 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:43Z","lastTransitionTime":"2025-12-04T09:38:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.900027 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-s4wsl"] Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.900391 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-s4wsl" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.902478 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.903122 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.903385 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.911513 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.913782 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z 
is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.934079 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.947876 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.962132 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.973736 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f048d2e7-fb0a-4323-ab77-9ea37e5f3926-serviceca\") pod \"node-ca-s4wsl\" (UID: \"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\") " pod="openshift-image-registry/node-ca-s4wsl" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.973779 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sx52\" (UniqueName: \"kubernetes.io/projected/f048d2e7-fb0a-4323-ab77-9ea37e5f3926-kube-api-access-9sx52\") pod \"node-ca-s4wsl\" (UID: \"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\") " pod="openshift-image-registry/node-ca-s4wsl" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.973827 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f048d2e7-fb0a-4323-ab77-9ea37e5f3926-host\") pod \"node-ca-s4wsl\" (UID: \"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\") " pod="openshift-image-registry/node-ca-s4wsl" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.975645 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.990841 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.990880 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.990889 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.990904 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:43 crc kubenswrapper[4707]: I1204 09:38:43.990914 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:43Z","lastTransitionTime":"2025-12-04T09:38:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.002015 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:43Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.013355 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.030814 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.049122 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkub
e-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574
53265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.062588 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.074369 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sx52\" (UniqueName: \"kubernetes.io/projected/f048d2e7-fb0a-4323-ab77-9ea37e5f3926-kube-api-access-9sx52\") pod \"node-ca-s4wsl\" (UID: \"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\") " pod="openshift-image-registry/node-ca-s4wsl" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.074665 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f048d2e7-fb0a-4323-ab77-9ea37e5f3926-host\") pod \"node-ca-s4wsl\" (UID: \"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\") " pod="openshift-image-registry/node-ca-s4wsl" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.074788 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f048d2e7-fb0a-4323-ab77-9ea37e5f3926-serviceca\") pod \"node-ca-s4wsl\" (UID: \"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\") " pod="openshift-image-registry/node-ca-s4wsl" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.074798 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f048d2e7-fb0a-4323-ab77-9ea37e5f3926-host\") pod \"node-ca-s4wsl\" (UID: \"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\") " pod="openshift-image-registry/node-ca-s4wsl" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.075752 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f048d2e7-fb0a-4323-ab77-9ea37e5f3926-serviceca\") pod \"node-ca-s4wsl\" (UID: \"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\") " pod="openshift-image-registry/node-ca-s4wsl" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.081320 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.094793 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.095111 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.095189 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.095273 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.095352 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:44Z","lastTransitionTime":"2025-12-04T09:38:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.096321 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sx52\" (UniqueName: \"kubernetes.io/projected/f048d2e7-fb0a-4323-ab77-9ea37e5f3926-kube-api-access-9sx52\") pod \"node-ca-s4wsl\" (UID: \"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\") " pod="openshift-image-registry/node-ca-s4wsl" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.097749 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.111276 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.126175 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.141452 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc27
6e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.154738 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.170325 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.183867 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.197552 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.197579 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.197589 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.197601 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.197610 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:44Z","lastTransitionTime":"2025-12-04T09:38:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.198138 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.238037 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-s4wsl" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.303251 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.303302 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.303313 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.303348 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.303361 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:44Z","lastTransitionTime":"2025-12-04T09:38:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.406152 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.406179 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.406188 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.406201 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.406209 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:44Z","lastTransitionTime":"2025-12-04T09:38:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.507974 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.508005 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.508014 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.508027 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.508035 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:44Z","lastTransitionTime":"2025-12-04T09:38:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.508839 4707 generic.go:334] "Generic (PLEG): container finished" podID="a3e11cde-e689-4b58-b238-08e945d8de0b" containerID="c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161" exitCode=0 Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.508901 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" event={"ID":"a3e11cde-e689-4b58-b238-08e945d8de0b","Type":"ContainerDied","Data":"c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.518855 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.518901 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.518918 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.518931 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.518944 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.518956 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.520712 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-s4wsl" event={"ID":"f048d2e7-fb0a-4323-ab77-9ea37e5f3926","Type":"ContainerStarted","Data":"0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.520773 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-s4wsl" event={"ID":"f048d2e7-fb0a-4323-ab77-9ea37e5f3926","Type":"ContainerStarted","Data":"fb2c0f7143f16d1b77b4a89cdbfc2cc5d3350d48b7c06c41bc45cc58166b3ac4"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.536018 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z 
is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.557195 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.572460 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.586873 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.598106 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.610936 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.611636 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.611685 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.611706 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.611732 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.611754 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:44Z","lastTransitionTime":"2025-12-04T09:38:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.628363 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.641326 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2
af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.654579 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc 
kubenswrapper[4707]: I1204 09:38:44.663576 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.675819 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.687174 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.699518 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc27
6e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.714244 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.714278 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.714292 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.714308 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.714319 4707 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:44Z","lastTransitionTime":"2025-12-04T09:38:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.717006 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.729840 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.744625 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.758097 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.772283 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.787687 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.801102 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.816218 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.816763 4707 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.816801 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.816810 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.816823 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.816833 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:44Z","lastTransitionTime":"2025-12-04T09:38:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.845041 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:44 crc kubenswrapper[4707]: E1204 09:38:44.845168 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.845741 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.884642 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.920218 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.920255 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.920264 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.920279 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.920289 4707 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:44Z","lastTransitionTime":"2025-12-04T09:38:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.930645 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:44 crc kubenswrapper[4707]: I1204 09:38:44.967605 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:44Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.008018 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.022493 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.022538 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.022547 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.022562 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.022574 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:45Z","lastTransitionTime":"2025-12-04T09:38:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.048313 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z 
is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.083469 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.125817 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.125863 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.125876 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.125894 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.125905 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:45Z","lastTransitionTime":"2025-12-04T09:38:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.135300 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountP
ath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.161858 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":
true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.228273 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.228351 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.228365 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.228383 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.228395 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:45Z","lastTransitionTime":"2025-12-04T09:38:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.332132 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.332165 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.332173 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.332185 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.332194 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:45Z","lastTransitionTime":"2025-12-04T09:38:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.434741 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.434806 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.434824 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.434848 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.434866 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:45Z","lastTransitionTime":"2025-12-04T09:38:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.527420 4707 generic.go:334] "Generic (PLEG): container finished" podID="a3e11cde-e689-4b58-b238-08e945d8de0b" containerID="c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc" exitCode=0 Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.527474 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" event={"ID":"a3e11cde-e689-4b58-b238-08e945d8de0b","Type":"ContainerDied","Data":"c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc"} Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.537655 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.537698 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.537711 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.537727 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.537740 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:45Z","lastTransitionTime":"2025-12-04T09:38:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.549820 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.568229 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.580750 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.594033 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.608250 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.621714 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\"
:\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 
09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.634375 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.640577 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.640616 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.640628 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.640645 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.640657 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:45Z","lastTransitionTime":"2025-12-04T09:38:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.647882 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.659183 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.670712 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.686045 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.705401 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8
aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.716887 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.735740 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.742676 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.742714 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.742723 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.742737 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.742748 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:45Z","lastTransitionTime":"2025-12-04T09:38:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.767797 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d
7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:45Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.843997 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.844053 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:45 crc kubenswrapper[4707]: E1204 09:38:45.844138 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:45 crc kubenswrapper[4707]: E1204 09:38:45.844273 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.845353 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.845394 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.845406 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.845424 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.845440 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:45Z","lastTransitionTime":"2025-12-04T09:38:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.948044 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.948100 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.948130 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.948151 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:45 crc kubenswrapper[4707]: I1204 09:38:45.948163 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:45Z","lastTransitionTime":"2025-12-04T09:38:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.051650 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.051690 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.051700 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.051717 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.051729 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:46Z","lastTransitionTime":"2025-12-04T09:38:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.153617 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.153659 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.153672 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.153690 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.153704 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:46Z","lastTransitionTime":"2025-12-04T09:38:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.255901 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.255940 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.255948 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.255963 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.255973 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:46Z","lastTransitionTime":"2025-12-04T09:38:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.358753 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.358820 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.358839 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.358867 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.358885 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:46Z","lastTransitionTime":"2025-12-04T09:38:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.462397 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.462457 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.462474 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.462498 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.462520 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:46Z","lastTransitionTime":"2025-12-04T09:38:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.501540 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.501710 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.501793 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-04 09:38:54.50175343 +0000 UTC m=+33.937575977 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.502325 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.502378 4707 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.502579 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:54.502543434 +0000 UTC m=+33.938365941 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.502432 4707 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.502800 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:54.502790682 +0000 UTC m=+33.938613189 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.533915 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" event={"ID":"a3e11cde-e689-4b58-b238-08e945d8de0b","Type":"ContainerDied","Data":"28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.534170 4707 generic.go:334] "Generic (PLEG): container finished" podID="a3e11cde-e689-4b58-b238-08e945d8de0b" containerID="28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618" exitCode=0 Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.540939 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.551290 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.565381 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.565427 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.565437 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.565452 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.565467 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:46Z","lastTransitionTime":"2025-12-04T09:38:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.568443 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.584021 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.603212 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.603316 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.603463 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.603478 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.603491 4707 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.603502 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.603507 4707 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.603516 4707 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.603565 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:54.603545205 +0000 UTC m=+34.039367722 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.603585 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 09:38:54.603576506 +0000 UTC m=+34.039399023 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.605987 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z 
is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.629615 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.644139 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.658176 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.668446 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.668486 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.668525 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.668547 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.668560 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:46Z","lastTransitionTime":"2025-12-04T09:38:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.669438 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.684075 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.696969 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.709936 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.723118 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.734139 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.749060 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.765363 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:46Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.771386 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.771441 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.771456 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.771476 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.771488 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:46Z","lastTransitionTime":"2025-12-04T09:38:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.844959 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:46 crc kubenswrapper[4707]: E1204 09:38:46.845160 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.875283 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.875320 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.875329 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.875361 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.875371 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:46Z","lastTransitionTime":"2025-12-04T09:38:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.978041 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.978073 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.978082 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.978100 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:46 crc kubenswrapper[4707]: I1204 09:38:46.978110 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:46Z","lastTransitionTime":"2025-12-04T09:38:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.082084 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.082133 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.082147 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.082166 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.082177 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:47Z","lastTransitionTime":"2025-12-04T09:38:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.187117 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.187890 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.187927 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.187959 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.187984 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:47Z","lastTransitionTime":"2025-12-04T09:38:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.291095 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.291145 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.291166 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.291194 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.291214 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:47Z","lastTransitionTime":"2025-12-04T09:38:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.394207 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.394282 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.394299 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.394358 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.394376 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:47Z","lastTransitionTime":"2025-12-04T09:38:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.496398 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.496429 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.496438 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.496450 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.496459 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:47Z","lastTransitionTime":"2025-12-04T09:38:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.548950 4707 generic.go:334] "Generic (PLEG): container finished" podID="a3e11cde-e689-4b58-b238-08e945d8de0b" containerID="7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38" exitCode=0 Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.549007 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" event={"ID":"a3e11cde-e689-4b58-b238-08e945d8de0b","Type":"ContainerDied","Data":"7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38"} Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.581991 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.599309 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.604073 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.604106 4707 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.604117 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.604134 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.604146 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:47Z","lastTransitionTime":"2025-12-04T09:38:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.619307 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z 
is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.640923 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.660468 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.674480 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.686258 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.704051 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.706553 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.706583 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.706596 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.706612 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.706626 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:47Z","lastTransitionTime":"2025-12-04T09:38:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.719228 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.734472 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.748355 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.761538 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.774223 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.791006 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.809167 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.809213 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.809228 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.809249 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.809262 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:47Z","lastTransitionTime":"2025-12-04T09:38:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.810416 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:47Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.844691 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.844707 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:47 crc kubenswrapper[4707]: E1204 09:38:47.844812 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:47 crc kubenswrapper[4707]: E1204 09:38:47.844993 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.912301 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.912417 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.912479 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.912512 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:47 crc kubenswrapper[4707]: I1204 09:38:47.912578 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:47Z","lastTransitionTime":"2025-12-04T09:38:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.016025 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.016062 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.016071 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.016085 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.016097 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:48Z","lastTransitionTime":"2025-12-04T09:38:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.119208 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.119270 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.119289 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.119313 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.119330 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:48Z","lastTransitionTime":"2025-12-04T09:38:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.222655 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.222716 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.222742 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.222773 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.222795 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:48Z","lastTransitionTime":"2025-12-04T09:38:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.327195 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.328007 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.328422 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.328758 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.329021 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:48Z","lastTransitionTime":"2025-12-04T09:38:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.432205 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.432271 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.432295 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.432480 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.432503 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:48Z","lastTransitionTime":"2025-12-04T09:38:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.535665 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.536006 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.536151 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.536296 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.536530 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:48Z","lastTransitionTime":"2025-12-04T09:38:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.556634 4707 generic.go:334] "Generic (PLEG): container finished" podID="a3e11cde-e689-4b58-b238-08e945d8de0b" containerID="d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c" exitCode=0 Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.556675 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" event={"ID":"a3e11cde-e689-4b58-b238-08e945d8de0b","Type":"ContainerDied","Data":"d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c"} Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.579073 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.595951 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.646181 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.646421 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.646437 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.646455 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.646468 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:48Z","lastTransitionTime":"2025-12-04T09:38:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.662567 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.678683 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.690983 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.708509 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.720987 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.732189 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.746107 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.748636 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.748692 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.748709 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.748730 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.748744 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:48Z","lastTransitionTime":"2025-12-04T09:38:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.759425 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.770413 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.784866 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.809435 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z 
is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.839593 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.846063 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:48 crc kubenswrapper[4707]: E1204 09:38:48.846150 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.855979 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.856000 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.856008 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.856018 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.856032 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:48Z","lastTransitionTime":"2025-12-04T09:38:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.872566 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:48Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.959050 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.959085 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.959095 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.959109 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:48 crc kubenswrapper[4707]: I1204 09:38:48.959120 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:48Z","lastTransitionTime":"2025-12-04T09:38:48Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.061981 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.062015 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.062023 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.062037 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.062048 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:49Z","lastTransitionTime":"2025-12-04T09:38:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.164602 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.164673 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.164692 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.164717 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.164736 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:49Z","lastTransitionTime":"2025-12-04T09:38:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.266912 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.266957 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.266969 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.266987 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.267000 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:49Z","lastTransitionTime":"2025-12-04T09:38:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.370427 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.370471 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.370479 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.370494 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.370506 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:49Z","lastTransitionTime":"2025-12-04T09:38:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.473036 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.473067 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.473074 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.473086 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.473098 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:49Z","lastTransitionTime":"2025-12-04T09:38:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.565041 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.565325 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.569956 4707 generic.go:334] "Generic (PLEG): container finished" podID="a3e11cde-e689-4b58-b238-08e945d8de0b" containerID="fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83" exitCode=0 Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.570023 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" event={"ID":"a3e11cde-e689-4b58-b238-08e945d8de0b","Type":"ContainerDied","Data":"fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.575000 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.575044 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.575055 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.575070 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.575082 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:49Z","lastTransitionTime":"2025-12-04T09:38:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.593461 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.598256 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.614775 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.629045 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.649803 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\
"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\
\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.663746 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.677418 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.677466 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.677477 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.677494 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.677505 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:49Z","lastTransitionTime":"2025-12-04T09:38:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.681881 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfc
rd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.693291 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.706241 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.722026 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.735512 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.746470 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.756960 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.770313 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.779251 4707 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.779284 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.779293 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.779306 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.779315 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:49Z","lastTransitionTime":"2025-12-04T09:38:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.784261 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.796465 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.808572 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.823866 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.841522 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.843952 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.844016 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:49 crc kubenswrapper[4707]: E1204 09:38:49.844066 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:49 crc kubenswrapper[4707]: E1204 09:38:49.844145 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.859238 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"res
tartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\
":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.871119 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.883373 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.883408 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.883417 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.883431 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.883441 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:49Z","lastTransitionTime":"2025-12-04T09:38:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.888581 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af
10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.899365 4707 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.911701 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.923727 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.935738 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.947842 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.959151 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.970441 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.983216 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.986268 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.986300 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.986312 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.986324 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.986359 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:49Z","lastTransitionTime":"2025-12-04T09:38:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:49 crc kubenswrapper[4707]: I1204 09:38:49.995199 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:49Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.089060 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.089095 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.089103 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.089116 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.089125 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:50Z","lastTransitionTime":"2025-12-04T09:38:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.191194 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.191241 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.191544 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.191877 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.191894 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:50Z","lastTransitionTime":"2025-12-04T09:38:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.294964 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.295032 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.295051 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.295077 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.295094 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:50Z","lastTransitionTime":"2025-12-04T09:38:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.398285 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.398381 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.398399 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.398421 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.398437 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:50Z","lastTransitionTime":"2025-12-04T09:38:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.447272 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.472073 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.502455 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.502530 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.502553 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.502585 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.502609 4707 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:50Z","lastTransitionTime":"2025-12-04T09:38:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.514189 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.535069 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.559747 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.578528 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-additional-cni-plugins-bk2sb" event={"ID":"a3e11cde-e689-4b58-b238-08e945d8de0b","Type":"ContainerStarted","Data":"c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4"} Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.579249 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.579309 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.593886 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c8c1cc461d139544ad443e1b694b69d44f7407
05cb1a118d417738842bae39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.605197 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.605246 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.605262 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.605282 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.605297 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:50Z","lastTransitionTime":"2025-12-04T09:38:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.613209 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.620484 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.639808 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.652480 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.666662 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.678798 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.690785 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.702444 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.707547 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.707586 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.707597 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.707611 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.707622 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:50Z","lastTransitionTime":"2025-12-04T09:38:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.712362 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.722223 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.737132 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.748703 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.761630 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.773555 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 
09:38:50.788644 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.802170 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.809974 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.810009 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.810018 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.810032 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.810043 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:50Z","lastTransitionTime":"2025-12-04T09:38:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.816176 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.827305 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.837277 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.844324 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:50 crc kubenswrapper[4707]: E1204 09:38:50.844466 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.847714 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.859313 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.871367 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.883106 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.894946 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.912359 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.912400 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.912410 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.912426 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.912436 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:50Z","lastTransitionTime":"2025-12-04T09:38:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.915598 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c8c1cc461d139544ad443e1b694b69d44f7407
05cb1a118d417738842bae39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.935941 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.950390 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.966171 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 09:38:50.977132 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:50 crc kubenswrapper[4707]: I1204 
09:38:50.994354 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2
025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:50Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.009447 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.014432 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.014454 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.014462 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.016647 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.016702 4707 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:51Z","lastTransitionTime":"2025-12-04T09:38:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.024248 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.037135 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.049736 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.061087 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.074061 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.085328 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.104305 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.115847 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.118511 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.118560 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.118570 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.118586 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.118596 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:51Z","lastTransitionTime":"2025-12-04T09:38:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.127921 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.144760 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.221619 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.221677 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.221695 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.221717 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.221735 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:51Z","lastTransitionTime":"2025-12-04T09:38:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.324749 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.324832 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.324853 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.324882 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.324906 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:51Z","lastTransitionTime":"2025-12-04T09:38:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.428371 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.428444 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.428481 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.428516 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.428538 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:51Z","lastTransitionTime":"2025-12-04T09:38:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.531816 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.531870 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.531881 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.531900 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.531913 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:51Z","lastTransitionTime":"2025-12-04T09:38:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.583892 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/0.log" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.587910 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39" exitCode=1 Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.587977 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39"} Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.588480 4707 scope.go:117] "RemoveContainer" containerID="70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.606544 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.618531 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.628491 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.637137 4707 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.637323 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.637371 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.637455 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.637480 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:51Z","lastTransitionTime":"2025-12-04T09:38:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.642391 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.655158 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19
ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] 
Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.668282 4707 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a2570
42704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.680204 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453
265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.691134 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.710987 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.723049 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.740489 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.740521 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.740534 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.740550 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.740573 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:51Z","lastTransitionTime":"2025-12-04T09:38:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.743400 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.764429 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:51Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:51.107323 5996 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:51.107349 5996 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:51.107373 5996 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:38:51.107376 5996 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 09:38:51.107385 5996 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1204 09:38:51.107399 5996 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:51.107411 5996 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1204 09:38:51.107422 5996 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 09:38:51.107428 5996 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1204 09:38:51.107427 5996 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:38:51.107435 5996 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1204 09:38:51.107436 5996 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1204 09:38:51.107438 5996 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 09:38:51.107441 5996 factory.go:656] Stopping watch factory\\\\nI1204 09:38:51.107457 5996 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.782400 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.798004 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.809794 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:51Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 
09:38:51.842722 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.842760 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.842768 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.842782 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.842792 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:51Z","lastTransitionTime":"2025-12-04T09:38:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.844196 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:51 crc kubenswrapper[4707]: E1204 09:38:51.844318 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.844406 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:51 crc kubenswrapper[4707]: E1204 09:38:51.844568 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.945441 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.945482 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.945491 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.945506 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:51 crc kubenswrapper[4707]: I1204 09:38:51.945515 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:51Z","lastTransitionTime":"2025-12-04T09:38:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.047699 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.047752 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.047765 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.047782 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.047795 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.150367 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.150422 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.150439 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.150461 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.150475 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.252239 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.252287 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.252301 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.252318 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.252331 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.354884 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.354927 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.354940 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.354956 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.354970 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.457666 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.457733 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.457754 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.457785 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.457809 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.561195 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.561262 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.561285 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.561317 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.561386 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.593685 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/0.log" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.596365 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.596960 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.612395 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.634689 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111
ef397f295b88df33e165cf44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:51Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:51.107323 5996 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:51.107349 5996 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:51.107373 5996 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:38:51.107376 5996 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 09:38:51.107385 5996 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1204 09:38:51.107399 5996 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:51.107411 5996 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1204 09:38:51.107422 5996 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 09:38:51.107428 5996 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1204 09:38:51.107427 5996 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:38:51.107435 5996 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1204 09:38:51.107436 5996 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1204 09:38:51.107438 5996 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 09:38:51.107441 5996 factory.go:656] Stopping watch factory\\\\nI1204 09:38:51.107457 5996 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.663791 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.663850 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.663870 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.663895 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.663914 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.667906 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.671189 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.671252 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.671271 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.671295 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.671313 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.684600 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: E1204 09:38:52.687277 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.691726 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.691815 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.691840 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.691868 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.691886 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.703347 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: E1204 09:38:52.711145 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"e
ae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.715226 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.715300 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.715325 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.715390 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.715415 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.720906 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: E1204 09:38:52.733825 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient 
memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\
\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\
":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.737760 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.737814 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.737829 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.737861 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.737880 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.739670 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: E1204 09:38:52.753998 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"e
ae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.758106 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.758156 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.758171 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.758195 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.758211 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.758373 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.772866 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: E1204 09:38:52.772963 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: E1204 09:38:52.773113 4707 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.774964 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.774993 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.775005 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.775024 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.775035 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.789388 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.801674 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.816694 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.828062 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.841613 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-api
server-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.844196 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:52 crc kubenswrapper[4707]: E1204 09:38:52.844351 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.854074 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:52Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.877954 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.877981 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.877990 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.878003 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.878014 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.980557 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.980621 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.980644 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.980670 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:52 crc kubenswrapper[4707]: I1204 09:38:52.980688 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:52Z","lastTransitionTime":"2025-12-04T09:38:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.083690 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.083757 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.083774 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.083795 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.083810 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:53Z","lastTransitionTime":"2025-12-04T09:38:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.186674 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.186728 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.186741 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.186758 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.186769 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:53Z","lastTransitionTime":"2025-12-04T09:38:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.288920 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.288950 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.288958 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.288970 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.288979 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:53Z","lastTransitionTime":"2025-12-04T09:38:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.392131 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.392174 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.392199 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.392214 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.392223 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:53Z","lastTransitionTime":"2025-12-04T09:38:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.495609 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.495663 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.495683 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.495702 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.495713 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:53Z","lastTransitionTime":"2025-12-04T09:38:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.599047 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.599100 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.599119 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.599146 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.599166 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:53Z","lastTransitionTime":"2025-12-04T09:38:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.601135 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/1.log" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.601757 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/0.log" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.605048 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44" exitCode=1 Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.605084 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44"} Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.605118 4707 scope.go:117] "RemoveContainer" containerID="70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.605774 4707 scope.go:117] "RemoveContainer" containerID="cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44" Dec 04 09:38:53 crc kubenswrapper[4707]: E1204 09:38:53.605939 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.631078 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.648614 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.665016 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.682795 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:51Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:51.107323 5996 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:51.107349 5996 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:51.107373 5996 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:38:51.107376 5996 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 09:38:51.107385 5996 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1204 09:38:51.107399 5996 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:51.107411 5996 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1204 09:38:51.107422 5996 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 09:38:51.107428 5996 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1204 09:38:51.107427 5996 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:38:51.107435 5996 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1204 09:38:51.107436 5996 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1204 09:38:51.107438 5996 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 09:38:51.107441 5996 factory.go:656] Stopping watch factory\\\\nI1204 09:38:51.107457 5996 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.419952 
6142 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:52.420456 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:52.420484 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:52.420574 6142 factory.go:656] Stopping watch factory\\\\nI1204 09:38:52.420594 6142 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420720 6142 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420943 6142 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1204 09:38:52.420600 6142 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:52.421023 6142 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 09:38:52.421449 6142 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d
2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.695323 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.701374 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.701534 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.701637 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.701741 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.701832 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:53Z","lastTransitionTime":"2025-12-04T09:38:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.713051 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.723428 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.735028 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.747885 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.760684 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.773363 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.786023 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.796804 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.804670 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.804719 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.804734 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.804755 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.804766 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:53Z","lastTransitionTime":"2025-12-04T09:38:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.809497 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.823081 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:53Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.844304 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.844352 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:53 crc kubenswrapper[4707]: E1204 09:38:53.844446 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:53 crc kubenswrapper[4707]: E1204 09:38:53.844599 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.906768 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.906801 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.906811 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.906826 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:53 crc kubenswrapper[4707]: I1204 09:38:53.906839 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:53Z","lastTransitionTime":"2025-12-04T09:38:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.009838 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.009894 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.009909 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.009927 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.009940 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:54Z","lastTransitionTime":"2025-12-04T09:38:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.113038 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.113091 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.113107 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.113130 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.113149 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:54Z","lastTransitionTime":"2025-12-04T09:38:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.216411 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.216507 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.216543 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.216564 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.216576 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:54Z","lastTransitionTime":"2025-12-04T09:38:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.313928 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6"] Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.314607 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.317979 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.317981 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.320112 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.320180 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.320204 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.320234 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.320259 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:54Z","lastTransitionTime":"2025-12-04T09:38:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.337829 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.354761 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.367502 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 
09:38:54.383573 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc 
kubenswrapper[4707]: I1204 09:38:54.400472 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.408781 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1efd049a-60ac-4370-8d36-37674547ede9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.408871 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1efd049a-60ac-4370-8d36-37674547ede9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.408906 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1efd049a-60ac-4370-8d36-37674547ede9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.408928 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz6tz\" (UniqueName: \"kubernetes.io/projected/1efd049a-60ac-4370-8d36-37674547ede9-kube-api-access-lz6tz\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.413573 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.422632 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.422677 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.422688 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.422703 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.422714 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:54Z","lastTransitionTime":"2025-12-04T09:38:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.429595 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.443053 4707 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.457111 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.471522 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.486439 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.500570 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.509479 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.509627 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1efd049a-60ac-4370-8d36-37674547ede9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.509713 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:39:10.5096817 +0000 UTC m=+49.945504217 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.509831 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.509892 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1efd049a-60ac-4370-8d36-37674547ede9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.509920 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz6tz\" (UniqueName: \"kubernetes.io/projected/1efd049a-60ac-4370-8d36-37674547ede9-kube-api-access-lz6tz\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.509957 4707 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.510009 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1efd049a-60ac-4370-8d36-37674547ede9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.510039 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:39:10.510016541 +0000 UTC m=+49.945839248 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.510075 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.510614 4707 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.510713 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:39:10.510690702 +0000 UTC m=+49.946513229 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.513876 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1efd049a-60ac-4370-8d36-37674547ede9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.514574 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1efd049a-60ac-4370-8d36-37674547ede9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.519708 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1efd049a-60ac-4370-8d36-37674547ede9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.525286 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.525444 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.525536 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.525623 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.525711 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:54Z","lastTransitionTime":"2025-12-04T09:38:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.531964 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz6tz\" (UniqueName: \"kubernetes.io/projected/1efd049a-60ac-4370-8d36-37674547ede9-kube-api-access-lz6tz\") pod \"ovnkube-control-plane-749d76644c-nm9w6\" (UID: \"1efd049a-60ac-4370-8d36-37674547ede9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.532934 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{}
,\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount
\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.547553 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.563638 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.584997 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111
ef397f295b88df33e165cf44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70c8c1cc461d139544ad443e1b694b69d44f740705cb1a118d417738842bae39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:51Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:51.107323 5996 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:51.107349 5996 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:51.107373 5996 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:38:51.107376 5996 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 09:38:51.107385 5996 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1204 09:38:51.107399 5996 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:51.107411 5996 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1204 09:38:51.107422 5996 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 09:38:51.107428 5996 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1204 09:38:51.107427 5996 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:38:51.107435 5996 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1204 09:38:51.107436 5996 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1204 09:38:51.107438 5996 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 09:38:51.107441 5996 factory.go:656] Stopping watch factory\\\\nI1204 09:38:51.107457 5996 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.419952 6142 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:52.420456 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:52.420484 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:52.420574 6142 factory.go:656] Stopping watch factory\\\\nI1204 09:38:52.420594 6142 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420720 6142 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420943 6142 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1204 09:38:52.420600 6142 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:52.421023 6142 handler.go:208] 
Removed *v1.Node event handler 2\\\\nI1204 09:38:52.421449 6142 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607
d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.610825 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.610929 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.610990 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.611014 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.611030 4707 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.611062 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:38:54 crc 
kubenswrapper[4707]: E1204 09:38:54.611083 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.611093 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 09:39:10.611070868 +0000 UTC m=+50.046893555 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.611097 4707 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.611157 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 09:39:10.61113696 +0000 UTC m=+50.046959487 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.611280 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/1.log" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.615655 4707 scope.go:117] "RemoveContainer" containerID="cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44" Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.616448 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.627855 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.627912 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.627927 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.627949 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.627965 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:54Z","lastTransitionTime":"2025-12-04T09:38:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.631447 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.632594 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.643273 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: W1204 09:38:54.649404 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1efd049a_60ac_4370_8d36_37674547ede9.slice/crio-a00e12f39957efe3e6020311744b76493f7125fc65a59e867e32a1a25d5e3d2f WatchSource:0}: Error finding container a00e12f39957efe3e6020311744b76493f7125fc65a59e867e32a1a25d5e3d2f: Status 404 returned error can't find the container with id a00e12f39957efe3e6020311744b76493f7125fc65a59e867e32a1a25d5e3d2f Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.653699 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.663743 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.676035 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-api
server-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.690914 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.708510 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.727514 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.730625 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.730673 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.730686 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.730704 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.730719 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:54Z","lastTransitionTime":"2025-12-04T09:38:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.748848 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.763618 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.778683 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.796104 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.419952 6142 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:52.420456 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:52.420484 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:52.420574 6142 factory.go:656] Stopping watch factory\\\\nI1204 09:38:52.420594 6142 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420720 6142 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420943 6142 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1204 09:38:52.420600 6142 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:52.421023 6142 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 09:38:52.421449 6142 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.810371 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.823298 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.833366 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.833405 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.833426 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.833446 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.833458 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:54Z","lastTransitionTime":"2025-12-04T09:38:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.837753 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.844185 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:54 crc kubenswrapper[4707]: E1204 09:38:54.844378 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.849939 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:54Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.935880 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.935935 4707 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.935947 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.935970 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:54 crc kubenswrapper[4707]: I1204 09:38:54.935984 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:54Z","lastTransitionTime":"2025-12-04T09:38:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.040426 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.040464 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.040476 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.040492 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.040502 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:55Z","lastTransitionTime":"2025-12-04T09:38:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.143127 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.143167 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.143182 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.143203 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.143220 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:55Z","lastTransitionTime":"2025-12-04T09:38:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.246325 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.246402 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.246413 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.246431 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.246443 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:55Z","lastTransitionTime":"2025-12-04T09:38:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.352138 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.352220 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.352241 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.352265 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.352291 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:55Z","lastTransitionTime":"2025-12-04T09:38:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.446017 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-txkn2"] Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.446623 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:38:55 crc kubenswrapper[4707]: E1204 09:38:55.446692 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.455623 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.455680 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.455694 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.455725 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.455740 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:55Z","lastTransitionTime":"2025-12-04T09:38:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.472850 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"ima
geID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.487718 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.503466 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.533187 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111
ef397f295b88df33e165cf44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.419952 6142 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:52.420456 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:52.420484 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:52.420574 6142 factory.go:656] Stopping watch factory\\\\nI1204 09:38:52.420594 6142 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420720 6142 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420943 6142 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1204 09:38:52.420600 6142 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:52.421023 6142 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 09:38:52.421449 6142 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.551530 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.558444 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.558507 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.558524 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.558545 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.558565 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:55Z","lastTransitionTime":"2025-12-04T09:38:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.572524 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.585083 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.595924 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.608987 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.620593 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" event={"ID":"1efd049a-60ac-4370-8d36-37674547ede9","Type":"ContainerStarted","Data":"45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.620636 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" event={"ID":"1efd049a-60ac-4370-8d36-37674547ede9","Type":"ContainerStarted","Data":"23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.620649 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" event={"ID":"1efd049a-60ac-4370-8d36-37674547ede9","Type":"ContainerStarted","Data":"a00e12f39957efe3e6020311744b76493f7125fc65a59e867e32a1a25d5e3d2f"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.620787 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl4bm\" (UniqueName: \"kubernetes.io/projected/9a8009fd-d652-44fb-8ef1-73078262e8fa-kube-api-access-wl4bm\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.620862 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.623328 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\
"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.639228 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.651746 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.660733 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.660760 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.660771 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.660783 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.660792 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:55Z","lastTransitionTime":"2025-12-04T09:38:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.667504 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.680997 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.692639 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.706000 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.718588 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.722043 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.722099 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl4bm\" (UniqueName: \"kubernetes.io/projected/9a8009fd-d652-44fb-8ef1-73078262e8fa-kube-api-access-wl4bm\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:38:55 crc kubenswrapper[4707]: E1204 09:38:55.722728 4707 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:38:55 crc kubenswrapper[4707]: E1204 09:38:55.722852 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs 
podName:9a8009fd-d652-44fb-8ef1-73078262e8fa nodeName:}" failed. No retries permitted until 2025-12-04 09:38:56.222820305 +0000 UTC m=+35.658642972 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs") pod "network-metrics-daemon-txkn2" (UID: "9a8009fd-d652-44fb-8ef1-73078262e8fa") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.732673 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.745382 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl4bm\" (UniqueName: \"kubernetes.io/projected/9a8009fd-d652-44fb-8ef1-73078262e8fa-kube-api-access-wl4bm\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:38:55 crc 
kubenswrapper[4707]: I1204 09:38:55.746531 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.762027 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.763568 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.763605 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.763618 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.763635 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.763647 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:55Z","lastTransitionTime":"2025-12-04T09:38:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.779884 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.795252 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.810383 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.824064 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.840689 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.844052 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.844030 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:55 crc kubenswrapper[4707]: E1204 09:38:55.844229 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:55 crc kubenswrapper[4707]: E1204 09:38:55.844264 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.863625 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"s
tartedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e
71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.866179 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.866252 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.866276 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.866310 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.866367 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:55Z","lastTransitionTime":"2025-12-04T09:38:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.883484 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.903312 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.927095 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111
ef397f295b88df33e165cf44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.419952 6142 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:52.420456 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:52.420484 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:52.420574 6142 factory.go:656] Stopping watch factory\\\\nI1204 09:38:52.420594 6142 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420720 6142 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420943 6142 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1204 09:38:52.420600 6142 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:52.421023 6142 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 09:38:52.421449 6142 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.940996 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.955881 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.969460 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.969505 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.969517 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.969534 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.969547 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:55Z","lastTransitionTime":"2025-12-04T09:38:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.977868 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:55 crc kubenswrapper[4707]: I1204 09:38:55.991165 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:55Z is after 2025-08-24T17:21:41Z" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.003271 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:38:56Z is after 2025-08-24T17:21:41Z" Dec 04 
09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.071474 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.071498 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.071510 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.071537 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.071549 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:56Z","lastTransitionTime":"2025-12-04T09:38:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.174012 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.174058 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.174069 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.174085 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.174097 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:56Z","lastTransitionTime":"2025-12-04T09:38:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.227303 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:38:56 crc kubenswrapper[4707]: E1204 09:38:56.227555 4707 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:38:56 crc kubenswrapper[4707]: E1204 09:38:56.228225 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs podName:9a8009fd-d652-44fb-8ef1-73078262e8fa nodeName:}" failed. No retries permitted until 2025-12-04 09:38:57.22769188 +0000 UTC m=+36.663514507 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs") pod "network-metrics-daemon-txkn2" (UID: "9a8009fd-d652-44fb-8ef1-73078262e8fa") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.277120 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.277188 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.277203 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.277229 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.277245 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:56Z","lastTransitionTime":"2025-12-04T09:38:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.380952 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.381008 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.381023 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.381043 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.381057 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:56Z","lastTransitionTime":"2025-12-04T09:38:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.484424 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.484469 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.484480 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.484502 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.484517 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:56Z","lastTransitionTime":"2025-12-04T09:38:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.587664 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.587733 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.587754 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.587782 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.587804 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:56Z","lastTransitionTime":"2025-12-04T09:38:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.690956 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.691031 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.691059 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.691085 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.691103 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:56Z","lastTransitionTime":"2025-12-04T09:38:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.793813 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.793863 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.793880 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.793914 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.793931 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:56Z","lastTransitionTime":"2025-12-04T09:38:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.845025 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:56 crc kubenswrapper[4707]: E1204 09:38:56.845196 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.845572 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:38:56 crc kubenswrapper[4707]: E1204 09:38:56.846459 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.898843 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.899255 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.899279 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.899300 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:56 crc kubenswrapper[4707]: I1204 09:38:56.899315 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:56Z","lastTransitionTime":"2025-12-04T09:38:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.002148 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.002196 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.002209 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.002232 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.002246 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:57Z","lastTransitionTime":"2025-12-04T09:38:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.104133 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.104174 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.104184 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.104200 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.104211 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:57Z","lastTransitionTime":"2025-12-04T09:38:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.206559 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.206593 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.206601 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.206614 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.206623 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:57Z","lastTransitionTime":"2025-12-04T09:38:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.237086 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:38:57 crc kubenswrapper[4707]: E1204 09:38:57.237270 4707 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:38:57 crc kubenswrapper[4707]: E1204 09:38:57.237406 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs podName:9a8009fd-d652-44fb-8ef1-73078262e8fa nodeName:}" failed. No retries permitted until 2025-12-04 09:38:59.237385397 +0000 UTC m=+38.673208064 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs") pod "network-metrics-daemon-txkn2" (UID: "9a8009fd-d652-44fb-8ef1-73078262e8fa") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.309500 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.309532 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.309541 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.309554 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.309564 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:57Z","lastTransitionTime":"2025-12-04T09:38:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.411461 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.411501 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.411512 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.411526 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.411581 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:57Z","lastTransitionTime":"2025-12-04T09:38:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.514393 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.514435 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.514445 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.514461 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.514473 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:57Z","lastTransitionTime":"2025-12-04T09:38:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.617495 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.617557 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.617571 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.617590 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.617603 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:57Z","lastTransitionTime":"2025-12-04T09:38:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.720686 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.720726 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.720735 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.720748 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.720757 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:57Z","lastTransitionTime":"2025-12-04T09:38:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.824438 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.824497 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.824518 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.824540 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.824555 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:57Z","lastTransitionTime":"2025-12-04T09:38:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.844026 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.844113 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:57 crc kubenswrapper[4707]: E1204 09:38:57.844153 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:57 crc kubenswrapper[4707]: E1204 09:38:57.844296 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.928038 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.928103 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.928121 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.928145 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:57 crc kubenswrapper[4707]: I1204 09:38:57.928163 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:57Z","lastTransitionTime":"2025-12-04T09:38:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.031469 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.031551 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.031578 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.031608 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.031630 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:58Z","lastTransitionTime":"2025-12-04T09:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.135035 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.135100 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.135128 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.135156 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.135175 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:58Z","lastTransitionTime":"2025-12-04T09:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.238033 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.238074 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.238084 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.238100 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.238111 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:58Z","lastTransitionTime":"2025-12-04T09:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.341064 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.341125 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.341147 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.341175 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.341196 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:58Z","lastTransitionTime":"2025-12-04T09:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.443959 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.444034 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.444051 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.444076 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.444093 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:58Z","lastTransitionTime":"2025-12-04T09:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.547987 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.548055 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.548077 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.548103 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.548120 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:58Z","lastTransitionTime":"2025-12-04T09:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.650202 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.650262 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.650279 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.650300 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.650317 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:58Z","lastTransitionTime":"2025-12-04T09:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.754009 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.754074 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.754085 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.754100 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.754109 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:58Z","lastTransitionTime":"2025-12-04T09:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.844595 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.844673 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:38:58 crc kubenswrapper[4707]: E1204 09:38:58.844754 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:38:58 crc kubenswrapper[4707]: E1204 09:38:58.844828 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.856515 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.856583 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.856608 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.856637 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.856662 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:58Z","lastTransitionTime":"2025-12-04T09:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.959704 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.959778 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.959803 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.959834 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:58 crc kubenswrapper[4707]: I1204 09:38:58.959857 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:58Z","lastTransitionTime":"2025-12-04T09:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.062197 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.062268 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.062287 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.062312 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.062329 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:59Z","lastTransitionTime":"2025-12-04T09:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.165107 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.165163 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.165178 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.165197 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.165211 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:59Z","lastTransitionTime":"2025-12-04T09:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.259293 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:38:59 crc kubenswrapper[4707]: E1204 09:38:59.259477 4707 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:38:59 crc kubenswrapper[4707]: E1204 09:38:59.259547 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs podName:9a8009fd-d652-44fb-8ef1-73078262e8fa nodeName:}" failed. No retries permitted until 2025-12-04 09:39:03.259529929 +0000 UTC m=+42.695352446 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs") pod "network-metrics-daemon-txkn2" (UID: "9a8009fd-d652-44fb-8ef1-73078262e8fa") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.268923 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.268960 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.268972 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.268988 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.269000 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:59Z","lastTransitionTime":"2025-12-04T09:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.371190 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.371232 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.371243 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.371258 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.371268 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:59Z","lastTransitionTime":"2025-12-04T09:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.473699 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.473741 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.473752 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.473768 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.473781 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:59Z","lastTransitionTime":"2025-12-04T09:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.576546 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.576589 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.576600 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.576616 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.576628 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:59Z","lastTransitionTime":"2025-12-04T09:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.679245 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.679321 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.679373 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.679399 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.679416 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:59Z","lastTransitionTime":"2025-12-04T09:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.781510 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.781574 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.781587 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.781606 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.781621 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:59Z","lastTransitionTime":"2025-12-04T09:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.844166 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.844195 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:38:59 crc kubenswrapper[4707]: E1204 09:38:59.844456 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:38:59 crc kubenswrapper[4707]: E1204 09:38:59.844544 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.885463 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.885512 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.885525 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.885543 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.885556 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:59Z","lastTransitionTime":"2025-12-04T09:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.988558 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.988605 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.988618 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.988635 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:38:59 crc kubenswrapper[4707]: I1204 09:38:59.988647 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:38:59Z","lastTransitionTime":"2025-12-04T09:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.092416 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.092468 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.092482 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.092498 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.092510 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:00Z","lastTransitionTime":"2025-12-04T09:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.195407 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.195446 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.195457 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.195474 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.195487 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:00Z","lastTransitionTime":"2025-12-04T09:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.297619 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.297976 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.298131 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.298253 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.298407 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:00Z","lastTransitionTime":"2025-12-04T09:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.401769 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.401807 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.401818 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.401832 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.401841 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:00Z","lastTransitionTime":"2025-12-04T09:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.504152 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.504205 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.504216 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.504234 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.504249 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:00Z","lastTransitionTime":"2025-12-04T09:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.607470 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.607541 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.607579 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.607612 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.607635 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:00Z","lastTransitionTime":"2025-12-04T09:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.710630 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.710712 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.710731 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.710756 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.710773 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:00Z","lastTransitionTime":"2025-12-04T09:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.817730 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.817766 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.817779 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.817792 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.817803 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:00Z","lastTransitionTime":"2025-12-04T09:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.844272 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.844605 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:00 crc kubenswrapper[4707]: E1204 09:39:00.844696 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:00 crc kubenswrapper[4707]: E1204 09:39:00.844989 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.863891 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:00Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.882052 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:00Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.898267 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:00Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.920380 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.920457 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.920481 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.920516 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.920539 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:00Z","lastTransitionTime":"2025-12-04T09:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.928141 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:00Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.950158 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:00Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.971488 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-a
piserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:00Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:00 crc kubenswrapper[4707]: I1204 09:39:00.989935 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:00Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.015066 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:01Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.022941 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.022991 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.023004 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.023024 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.023036 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:01Z","lastTransitionTime":"2025-12-04T09:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.046688 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.419952 6142 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:52.420456 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:52.420484 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:52.420574 6142 factory.go:656] Stopping watch factory\\\\nI1204 09:38:52.420594 6142 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420720 6142 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420943 6142 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1204 09:38:52.420600 6142 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:52.421023 6142 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 09:38:52.421449 6142 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:01Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.072361 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ea
d45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:01Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.087432 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:01Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.105494 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:01Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.119557 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:01Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.125209 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.125304 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.125323 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.125380 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.125398 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:01Z","lastTransitionTime":"2025-12-04T09:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.134287 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:01Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.147503 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-04T09:39:01Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.161378 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:01Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.179577 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:01Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.227573 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.227621 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:01 crc 
kubenswrapper[4707]: I1204 09:39:01.227637 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.227659 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.227677 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:01Z","lastTransitionTime":"2025-12-04T09:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.330447 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.330800 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.330873 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.330942 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.331009 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:01Z","lastTransitionTime":"2025-12-04T09:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.434182 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.434252 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.434269 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.434297 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.434318 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:01Z","lastTransitionTime":"2025-12-04T09:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.537171 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.537220 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.537229 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.537243 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.537255 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:01Z","lastTransitionTime":"2025-12-04T09:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.638995 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.639024 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.639097 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.639115 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.639151 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:01Z","lastTransitionTime":"2025-12-04T09:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.742976 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.743045 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.743062 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.743086 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.743104 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:01Z","lastTransitionTime":"2025-12-04T09:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.844310 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.844456 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:01 crc kubenswrapper[4707]: E1204 09:39:01.844560 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:01 crc kubenswrapper[4707]: E1204 09:39:01.845422 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.846611 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.846656 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.846676 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.846694 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.846706 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:01Z","lastTransitionTime":"2025-12-04T09:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.949849 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.949922 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.949945 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.949974 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:01 crc kubenswrapper[4707]: I1204 09:39:01.949994 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:01Z","lastTransitionTime":"2025-12-04T09:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.052583 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.052643 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.052662 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.052686 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.052702 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.154808 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.154849 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.154859 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.154872 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.154882 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.257710 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.257760 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.257776 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.257796 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.257812 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.360664 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.360723 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.360741 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.360773 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.360795 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.464563 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.464637 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.464655 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.464680 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.464705 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.567165 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.567246 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.567270 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.567301 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.567324 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.670246 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.670304 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.670319 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.670352 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.670364 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.772779 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.772830 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.772845 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.772863 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.772877 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.844607 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.844622 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:02 crc kubenswrapper[4707]: E1204 09:39:02.844827 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:02 crc kubenswrapper[4707]: E1204 09:39:02.845008 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.875591 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.875657 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.875679 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.875706 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.875729 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.944840 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.944907 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.944933 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.944983 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.945010 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: E1204 09:39:02.963790 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:02Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.968428 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.968450 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.968460 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.968475 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.968487 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:02 crc kubenswrapper[4707]: E1204 09:39:02.988024 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:02Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.992998 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.993076 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.993116 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.993146 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:02 crc kubenswrapper[4707]: I1204 09:39:02.993167 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:02Z","lastTransitionTime":"2025-12-04T09:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: E1204 09:39:03.010816 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:03Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.015228 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.015282 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.015305 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.015382 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.015409 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: E1204 09:39:03.027124 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:03Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.030677 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.030709 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.030724 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.030746 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.030760 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: E1204 09:39:03.048140 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:03Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:03 crc kubenswrapper[4707]: E1204 09:39:03.048285 4707 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.049938 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.049994 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.050016 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.050042 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.050063 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.152988 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.153060 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.153083 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.153125 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.153167 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.256516 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.256609 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.256630 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.256663 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.256685 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.293666 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:03 crc kubenswrapper[4707]: E1204 09:39:03.293935 4707 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:39:03 crc kubenswrapper[4707]: E1204 09:39:03.294076 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs podName:9a8009fd-d652-44fb-8ef1-73078262e8fa nodeName:}" failed. No retries permitted until 2025-12-04 09:39:11.294039666 +0000 UTC m=+50.729862363 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs") pod "network-metrics-daemon-txkn2" (UID: "9a8009fd-d652-44fb-8ef1-73078262e8fa") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.360617 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.360675 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.360689 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.360713 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.360727 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.463020 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.463064 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.463078 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.463096 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.463108 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.565043 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.565086 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.565098 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.565114 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.565129 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.667094 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.667430 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.667443 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.667462 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.667475 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.770523 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.770564 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.770572 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.770586 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.770597 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.844975 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.845117 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:03 crc kubenswrapper[4707]: E1204 09:39:03.845145 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:03 crc kubenswrapper[4707]: E1204 09:39:03.845416 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.877418 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.878088 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.878180 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.878270 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.878410 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.980917 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.981028 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.981051 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.981081 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:03 crc kubenswrapper[4707]: I1204 09:39:03.981103 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:03Z","lastTransitionTime":"2025-12-04T09:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.084722 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.084791 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.084812 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.084838 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.084856 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:04Z","lastTransitionTime":"2025-12-04T09:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.188050 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.188161 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.188179 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.188203 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.188221 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:04Z","lastTransitionTime":"2025-12-04T09:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.291109 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.291154 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.291163 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.291178 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.291188 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:04Z","lastTransitionTime":"2025-12-04T09:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.393754 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.393801 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.393811 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.393827 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.393836 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:04Z","lastTransitionTime":"2025-12-04T09:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.496473 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.496521 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.496537 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.496556 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.496567 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:04Z","lastTransitionTime":"2025-12-04T09:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.599253 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.599314 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.599362 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.599389 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.599406 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:04Z","lastTransitionTime":"2025-12-04T09:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.702092 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.702193 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.702217 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.702248 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.702272 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:04Z","lastTransitionTime":"2025-12-04T09:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.804697 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.804785 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.804810 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.804844 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.804871 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:04Z","lastTransitionTime":"2025-12-04T09:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.844741 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.844819 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:04 crc kubenswrapper[4707]: E1204 09:39:04.845450 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:04 crc kubenswrapper[4707]: E1204 09:39:04.845681 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.846026 4707 scope.go:117] "RemoveContainer" containerID="cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.907783 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.907871 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.907896 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.907930 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:04 crc kubenswrapper[4707]: I1204 09:39:04.907956 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:04Z","lastTransitionTime":"2025-12-04T09:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.011074 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.011137 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.011149 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.011165 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.011178 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:05Z","lastTransitionTime":"2025-12-04T09:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.114106 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.114163 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.114181 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.114203 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.114224 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:05Z","lastTransitionTime":"2025-12-04T09:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.269466 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.269494 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.269503 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.269515 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.269524 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:05Z","lastTransitionTime":"2025-12-04T09:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.372073 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.372125 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.372140 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.372159 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.372175 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:05Z","lastTransitionTime":"2025-12-04T09:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.474222 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.474277 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.474293 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.474318 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.474361 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:05Z","lastTransitionTime":"2025-12-04T09:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.576749 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.576796 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.576812 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.576829 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.576839 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:05Z","lastTransitionTime":"2025-12-04T09:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.654378 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/1.log" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.657667 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc"} Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.658204 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.675355 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":
\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.680884 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.680945 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.680969 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.681001 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.681023 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:05Z","lastTransitionTime":"2025-12-04T09:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.697986 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.714379 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.738455 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.758711 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.770955 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.783519 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.783596 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.783610 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.783630 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.783647 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:05Z","lastTransitionTime":"2025-12-04T09:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.786609 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.802623 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.827315 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.840772 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.843931 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.843955 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:05 crc kubenswrapper[4707]: E1204 09:39:05.844043 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:05 crc kubenswrapper[4707]: E1204 09:39:05.844139 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.855299 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.878427 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.419952 6142 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:52.420456 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:52.420484 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:52.420574 6142 factory.go:656] Stopping watch factory\\\\nI1204 09:38:52.420594 6142 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420720 6142 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420943 6142 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1204 09:38:52.420600 6142 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:52.421023 6142 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 09:38:52.421449 6142 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.885947 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.886005 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.886022 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.886046 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.886062 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:05Z","lastTransitionTime":"2025-12-04T09:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.895071 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.913897 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.928043 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.940206 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 
09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.952625 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:05Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.989059 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.989104 4707 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.989117 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.989133 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:05 crc kubenswrapper[4707]: I1204 09:39:05.989144 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:05Z","lastTransitionTime":"2025-12-04T09:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.092055 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.092120 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.092141 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.092167 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.092190 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:06Z","lastTransitionTime":"2025-12-04T09:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.196014 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.196103 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.196134 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.196167 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.196191 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:06Z","lastTransitionTime":"2025-12-04T09:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.298535 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.298585 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.298598 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.298617 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.298630 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:06Z","lastTransitionTime":"2025-12-04T09:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.401663 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.401739 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.401758 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.401784 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.401802 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:06Z","lastTransitionTime":"2025-12-04T09:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.505193 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.505262 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.505279 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.505302 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.505320 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:06Z","lastTransitionTime":"2025-12-04T09:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.608998 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.609057 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.609070 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.609093 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.609107 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:06Z","lastTransitionTime":"2025-12-04T09:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.662737 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/2.log" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.663599 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/1.log" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.666984 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc" exitCode=1 Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.667044 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc"} Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.667102 4707 scope.go:117] "RemoveContainer" containerID="cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.667693 4707 scope.go:117] "RemoveContainer" containerID="dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc" Dec 04 09:39:06 crc kubenswrapper[4707]: E1204 09:39:06.667850 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.680841 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.693971 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 
09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.711022 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.711463 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.711507 4707 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.711518 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.711538 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.711550 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:06Z","lastTransitionTime":"2025-12-04T09:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.727997 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.747289 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.762797 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\
\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.776584 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.787437 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.799084 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.814074 4707 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.814126 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.814142 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.814162 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.814178 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:06Z","lastTransitionTime":"2025-12-04T09:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.817320 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.835169 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19
ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] 
Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.844110 4707 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.844151 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:06 crc kubenswrapper[4707]: E1204 09:39:06.844298 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:06 crc kubenswrapper[4707]: E1204 09:39:06.844434 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.852842 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.871445 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.889051 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b2
68d92e196c3b32a5ab4b50fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4e56bbce1c1967b8dc270befa9551449b29111ef397f295b88df33e165cf44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:38:52Z\\\",\\\"message\\\":\\\"kg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.419952 6142 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:38:52.420456 6142 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 09:38:52.420484 6142 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1204 09:38:52.420574 6142 factory.go:656] Stopping watch factory\\\\nI1204 09:38:52.420594 6142 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420720 6142 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:38:52.420943 6142 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1204 09:38:52.420600 6142 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 09:38:52.421023 6142 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 09:38:52.421449 6142 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:05Z\\\",\\\"message\\\":\\\" 6355 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.810563 6355 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810603 6355 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810780 6355 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.811126 6355 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.811596 6355 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 09:39:05.811629 6355 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 09:39:05.811689 6355 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 09:39:05.811705 6355 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:39:05.811731 6355 factory.go:656] Stopping watch 
factory\\\\nI1204 09:39:05.811764 6355 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:39:05.811784 6355 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d
1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.909621 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\
\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.916053 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.916104 4707 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.916113 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.916126 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.916135 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:06Z","lastTransitionTime":"2025-12-04T09:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.923981 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:06 crc kubenswrapper[4707]: I1204 09:39:06.938526 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:06Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.019065 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.019116 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.019132 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.019154 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.019171 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:07Z","lastTransitionTime":"2025-12-04T09:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.122798 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.122875 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.122914 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.122947 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.122972 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:07Z","lastTransitionTime":"2025-12-04T09:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.225499 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.225552 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.225566 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.225584 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.225597 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:07Z","lastTransitionTime":"2025-12-04T09:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.328688 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.328741 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.328754 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.328771 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.328785 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:07Z","lastTransitionTime":"2025-12-04T09:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.431945 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.432004 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.432014 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.432029 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.432039 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:07Z","lastTransitionTime":"2025-12-04T09:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.535177 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.535252 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.535274 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.535304 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.535327 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:07Z","lastTransitionTime":"2025-12-04T09:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.639017 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.639097 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.639110 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.639130 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.639143 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:07Z","lastTransitionTime":"2025-12-04T09:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.672624 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/2.log" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.677479 4707 scope.go:117] "RemoveContainer" containerID="dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc" Dec 04 09:39:07 crc kubenswrapper[4707]: E1204 09:39:07.677733 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.692088 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.704763 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.720971 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.741298 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.741393 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.741412 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.741434 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.741455 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:07Z","lastTransitionTime":"2025-12-04T09:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.743014 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453
f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.761089 4707 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.776239 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.788868 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 
09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.803172 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.824349 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.838882 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.843458 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.843516 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.843532 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.843558 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.843574 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:07Z","lastTransitionTime":"2025-12-04T09:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.843912 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:07 crc kubenswrapper[4707]: E1204 09:39:07.844024 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.843921 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:07 crc kubenswrapper[4707]: E1204 09:39:07.844128 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.861742 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.887707 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:05Z\\\",\\\"message\\\":\\\" 6355 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.810563 6355 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810603 6355 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810780 6355 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.811126 6355 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.811596 6355 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 09:39:05.811629 6355 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 09:39:05.811689 6355 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 09:39:05.811705 6355 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:39:05.811731 6355 factory.go:656] Stopping watch factory\\\\nI1204 09:39:05.811764 6355 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:39:05.811784 6355 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.901735 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.917317 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.934129 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.943794 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 
09:39:07.945371 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.945418 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.945434 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.945453 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.945467 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:07Z","lastTransitionTime":"2025-12-04T09:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:07 crc kubenswrapper[4707]: I1204 09:39:07.957357 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"qu
ay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:07Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.048658 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.048730 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.048753 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.048785 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.049242 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:08Z","lastTransitionTime":"2025-12-04T09:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.152554 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.152638 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.152653 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.152672 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.152686 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:08Z","lastTransitionTime":"2025-12-04T09:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.256244 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.256312 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.256379 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.256411 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.256435 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:08Z","lastTransitionTime":"2025-12-04T09:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.359897 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.359963 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.360001 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.360033 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.360059 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:08Z","lastTransitionTime":"2025-12-04T09:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.463090 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.463151 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.463169 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.463197 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.463216 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:08Z","lastTransitionTime":"2025-12-04T09:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.567525 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.567599 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.567625 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.567657 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.567678 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:08Z","lastTransitionTime":"2025-12-04T09:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.670193 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.670250 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.670263 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.670282 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.670299 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:08Z","lastTransitionTime":"2025-12-04T09:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.774783 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.774860 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.774886 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.774916 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.774940 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:08Z","lastTransitionTime":"2025-12-04T09:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.844212 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.844316 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:08 crc kubenswrapper[4707]: E1204 09:39:08.844391 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:08 crc kubenswrapper[4707]: E1204 09:39:08.844615 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.877760 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.877858 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.877881 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.877907 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.877925 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:08Z","lastTransitionTime":"2025-12-04T09:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.980566 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.980631 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.980648 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.980671 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:08 crc kubenswrapper[4707]: I1204 09:39:08.980689 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:08Z","lastTransitionTime":"2025-12-04T09:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.084667 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.084733 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.084743 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.084760 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.084773 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:09Z","lastTransitionTime":"2025-12-04T09:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.187955 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.188018 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.188034 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.188056 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.188076 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:09Z","lastTransitionTime":"2025-12-04T09:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.291531 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.291600 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.291623 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.291683 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.291721 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:09Z","lastTransitionTime":"2025-12-04T09:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.394312 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.394392 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.394403 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.394419 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.394433 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:09Z","lastTransitionTime":"2025-12-04T09:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.496777 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.496820 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.496863 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.496884 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.496896 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:09Z","lastTransitionTime":"2025-12-04T09:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.599735 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.599789 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.599805 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.599830 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.599850 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:09Z","lastTransitionTime":"2025-12-04T09:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.703054 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.703135 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.703159 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.703192 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.703215 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:09Z","lastTransitionTime":"2025-12-04T09:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.806786 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.806863 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.806880 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.806903 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.806921 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:09Z","lastTransitionTime":"2025-12-04T09:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.844087 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.844258 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:09 crc kubenswrapper[4707]: E1204 09:39:09.844513 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:09 crc kubenswrapper[4707]: E1204 09:39:09.844702 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.868143 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.879016 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.890757 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\
\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:09Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.905954 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:09Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:09 crc 
kubenswrapper[4707]: I1204 09:39:09.909945 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.909991 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.910004 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.910022 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.910039 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:09Z","lastTransitionTime":"2025-12-04T09:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.924738 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2
\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:09Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.940315 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:09Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.959204 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:09Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.979766 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:09Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:09 crc kubenswrapper[4707]: I1204 09:39:09.999026 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:09Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.012858 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.012910 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.012922 4707 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.012938 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.012950 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:10Z","lastTransitionTime":"2025-12-04T09:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.018890 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.036428 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.053816 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.077512 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.096779 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-api
server-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.113865 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.116453 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.116544 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.116572 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.116607 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.116633 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:10Z","lastTransitionTime":"2025-12-04T09:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.131569 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.155268 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:05Z\\\",\\\"message\\\":\\\" 6355 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.810563 6355 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810603 6355 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810780 6355 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.811126 6355 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.811596 6355 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 09:39:05.811629 6355 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 09:39:05.811689 6355 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 09:39:05.811705 6355 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:39:05.811731 6355 factory.go:656] Stopping watch factory\\\\nI1204 09:39:05.811764 6355 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:39:05.811784 6355 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.178446 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ea
d45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.192263 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.219110 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.219152 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.219163 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.219182 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.219195 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:10Z","lastTransitionTime":"2025-12-04T09:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.321604 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.321652 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.321693 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.321711 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.321722 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:10Z","lastTransitionTime":"2025-12-04T09:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.425392 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.425433 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.425442 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.425456 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.425466 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:10Z","lastTransitionTime":"2025-12-04T09:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.521457 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.521567 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.521594 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.521633 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:39:42.52159825 +0000 UTC m=+81.957420787 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.521661 4707 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.521783 4707 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.521843 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:39:42.521813767 +0000 UTC m=+81.957636444 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.521884 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:39:42.521861698 +0000 UTC m=+81.957684395 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.528226 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.528278 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.528393 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.528446 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.528470 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:10Z","lastTransitionTime":"2025-12-04T09:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.623387 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.623523 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.623643 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.623694 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.623710 4707 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.623802 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 09:39:42.623776542 +0000 UTC m=+82.059599049 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.623883 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.623943 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.623968 4707 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.624079 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 09:39:42.624048351 +0000 UTC m=+82.059870898 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.631425 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.631472 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.631485 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.631504 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.631517 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:10Z","lastTransitionTime":"2025-12-04T09:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.734801 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.734860 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.734874 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.734893 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.734906 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:10Z","lastTransitionTime":"2025-12-04T09:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.838411 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.838478 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.838503 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.838529 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.838546 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:10Z","lastTransitionTime":"2025-12-04T09:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.843966 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.844002 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.844204 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:10 crc kubenswrapper[4707]: E1204 09:39:10.844206 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.867500 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e
6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.883270 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.901557 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.920135 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.936706 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.941224 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.941260 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.941276 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.941297 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.941315 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:10Z","lastTransitionTime":"2025-12-04T09:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.950091 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.962902 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.975603 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:10 crc kubenswrapper[4707]: I1204 09:39:10.992496 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:10Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.006400 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:11Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.025196 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:05Z\\\",\\\"message\\\":\\\" 6355 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.810563 6355 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810603 6355 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810780 6355 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.811126 6355 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.811596 6355 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 09:39:05.811629 6355 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 09:39:05.811689 6355 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 09:39:05.811705 6355 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:39:05.811731 6355 factory.go:656] Stopping watch factory\\\\nI1204 09:39:05.811764 6355 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:39:05.811784 6355 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:11Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.043757 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.043805 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.043821 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.043840 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.043854 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:11Z","lastTransitionTime":"2025-12-04T09:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.047054 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:11Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.060940 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:11Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.078840 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:11Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.090244 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:11Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 
09:39:11.100631 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:11Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.112318 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:11Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.123976 4707 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5682ec3-78a9-47a8-a2bd-e4e58e4a5711\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b948fa6cd68a641df1c78251110c4e49e944be9affe87715c5b80be44f60ca00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e160007086212e3c54a49bdfcd7a43776b81c7561b18d963f0667181e2238c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ac30713fd3273b83ee133e02d9575ce884e1732adf4b612d134c208fd9cbf0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSt
atuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:11Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.145911 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.145961 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.145971 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.145984 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.145995 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:11Z","lastTransitionTime":"2025-12-04T09:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.248768 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.248795 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.248821 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.248833 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.248841 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:11Z","lastTransitionTime":"2025-12-04T09:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.331159 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:11 crc kubenswrapper[4707]: E1204 09:39:11.331298 4707 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:39:11 crc kubenswrapper[4707]: E1204 09:39:11.331378 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs podName:9a8009fd-d652-44fb-8ef1-73078262e8fa nodeName:}" failed. No retries permitted until 2025-12-04 09:39:27.331363501 +0000 UTC m=+66.767186008 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs") pod "network-metrics-daemon-txkn2" (UID: "9a8009fd-d652-44fb-8ef1-73078262e8fa") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.351550 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.351599 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.351616 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.351640 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.351657 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:11Z","lastTransitionTime":"2025-12-04T09:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.455060 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.455124 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.455142 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.455165 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.455183 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:11Z","lastTransitionTime":"2025-12-04T09:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.559184 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.559318 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.559390 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.559426 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.559451 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:11Z","lastTransitionTime":"2025-12-04T09:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.663093 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.663142 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.663154 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.663172 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.663185 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:11Z","lastTransitionTime":"2025-12-04T09:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.766766 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.766828 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.766852 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.766884 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.766907 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:11Z","lastTransitionTime":"2025-12-04T09:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.844634 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.844628 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:11 crc kubenswrapper[4707]: E1204 09:39:11.844812 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:11 crc kubenswrapper[4707]: E1204 09:39:11.844989 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.869900 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.869962 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.869980 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.870009 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.870032 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:11Z","lastTransitionTime":"2025-12-04T09:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.975392 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.975444 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.975463 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.975485 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:11 crc kubenswrapper[4707]: I1204 09:39:11.975499 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:11Z","lastTransitionTime":"2025-12-04T09:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.078945 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.079368 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.079391 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.079414 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.079430 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:12Z","lastTransitionTime":"2025-12-04T09:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.181712 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.181758 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.181772 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.181790 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.181804 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:12Z","lastTransitionTime":"2025-12-04T09:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.285593 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.285655 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.285685 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.285711 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.285728 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:12Z","lastTransitionTime":"2025-12-04T09:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.389763 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.389958 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.390038 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.390061 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.390096 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:12Z","lastTransitionTime":"2025-12-04T09:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.493918 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.493955 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.493966 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.493982 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.493993 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:12Z","lastTransitionTime":"2025-12-04T09:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.595801 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.595852 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.595867 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.595888 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.595901 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:12Z","lastTransitionTime":"2025-12-04T09:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.697898 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.697944 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.697955 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.697970 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.697982 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:12Z","lastTransitionTime":"2025-12-04T09:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.800924 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.800950 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.800960 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.800974 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.800985 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:12Z","lastTransitionTime":"2025-12-04T09:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.844704 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.844801 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:12 crc kubenswrapper[4707]: E1204 09:39:12.844835 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:12 crc kubenswrapper[4707]: E1204 09:39:12.844991 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.903069 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.903103 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.903112 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.903124 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:12 crc kubenswrapper[4707]: I1204 09:39:12.903134 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:12Z","lastTransitionTime":"2025-12-04T09:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.006057 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.006152 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.006178 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.006216 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.006261 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.109711 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.109778 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.109802 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.109834 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.109858 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.212888 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.212947 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.212969 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.213001 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.213022 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.316477 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.316542 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.316566 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.316595 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.316619 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.414282 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.414369 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.414387 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.414411 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.414428 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: E1204 09:39:13.433989 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:13Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.438717 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.438765 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.438779 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.438800 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.438816 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: E1204 09:39:13.454090 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:13Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.459296 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.459406 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.459426 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.459479 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.459497 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: E1204 09:39:13.477111 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:13Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.481812 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.481867 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.481885 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.481908 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.481926 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: E1204 09:39:13.504375 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:13Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.508310 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.508423 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.508436 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.508453 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.508464 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: E1204 09:39:13.520997 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:13Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:13 crc kubenswrapper[4707]: E1204 09:39:13.521279 4707 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.523377 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.523420 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.523436 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.523459 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.523476 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.626093 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.626138 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.626186 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.626209 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.626227 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.729093 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.729140 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.729153 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.729169 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.729181 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.831635 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.831692 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.831710 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.831733 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.831750 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.843917 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.843952 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:13 crc kubenswrapper[4707]: E1204 09:39:13.844083 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:13 crc kubenswrapper[4707]: E1204 09:39:13.844167 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.935263 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.935402 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.935431 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.935463 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:13 crc kubenswrapper[4707]: I1204 09:39:13.935487 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:13Z","lastTransitionTime":"2025-12-04T09:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.038370 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.038420 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.038436 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.038459 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.038475 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:14Z","lastTransitionTime":"2025-12-04T09:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.141504 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.141652 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.141662 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.141675 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.141684 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:14Z","lastTransitionTime":"2025-12-04T09:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.244126 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.244186 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.244209 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.244238 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.244261 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:14Z","lastTransitionTime":"2025-12-04T09:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.347727 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.347786 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.347806 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.347830 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.347848 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:14Z","lastTransitionTime":"2025-12-04T09:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.450961 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.451046 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.451079 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.451112 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.451134 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:14Z","lastTransitionTime":"2025-12-04T09:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.554553 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.554628 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.554725 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.554806 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.554835 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:14Z","lastTransitionTime":"2025-12-04T09:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.656999 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.657046 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.657059 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.657077 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.657089 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:14Z","lastTransitionTime":"2025-12-04T09:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.759593 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.759642 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.759653 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.759670 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.759683 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:14Z","lastTransitionTime":"2025-12-04T09:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.843926 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.844025 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:14 crc kubenswrapper[4707]: E1204 09:39:14.844092 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:14 crc kubenswrapper[4707]: E1204 09:39:14.844247 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.863158 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.863237 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.863264 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.863291 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.863314 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:14Z","lastTransitionTime":"2025-12-04T09:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.965619 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.965679 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.965696 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.965717 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:14 crc kubenswrapper[4707]: I1204 09:39:14.965735 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:14Z","lastTransitionTime":"2025-12-04T09:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.069362 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.069452 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.069474 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.069505 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.069527 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:15Z","lastTransitionTime":"2025-12-04T09:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.172402 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.172476 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.172496 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.172520 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.172539 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:15Z","lastTransitionTime":"2025-12-04T09:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.305306 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.305412 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.305436 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.305462 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.305494 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:15Z","lastTransitionTime":"2025-12-04T09:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.407668 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.407701 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.407709 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.407724 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.407733 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:15Z","lastTransitionTime":"2025-12-04T09:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.509773 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.509807 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.509815 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.509827 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.509835 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:15Z","lastTransitionTime":"2025-12-04T09:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.612868 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.612936 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.612953 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.612978 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.612995 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:15Z","lastTransitionTime":"2025-12-04T09:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.718679 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.718777 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.718803 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.718837 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.718857 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:15Z","lastTransitionTime":"2025-12-04T09:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.821829 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.821875 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.821885 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.821898 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.821908 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:15Z","lastTransitionTime":"2025-12-04T09:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.844376 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.844532 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:15 crc kubenswrapper[4707]: E1204 09:39:15.844624 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:15 crc kubenswrapper[4707]: E1204 09:39:15.844756 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.924848 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.924883 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.924891 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.924903 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:15 crc kubenswrapper[4707]: I1204 09:39:15.924911 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:15Z","lastTransitionTime":"2025-12-04T09:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.027742 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.027791 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.027801 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.027816 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.027826 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:16Z","lastTransitionTime":"2025-12-04T09:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.130715 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.130774 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.130785 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.130799 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.130837 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:16Z","lastTransitionTime":"2025-12-04T09:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.233663 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.233748 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.233763 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.233782 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.233818 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:16Z","lastTransitionTime":"2025-12-04T09:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.336364 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.336393 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.336402 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.336415 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.336426 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:16Z","lastTransitionTime":"2025-12-04T09:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.438996 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.439086 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.439112 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.439148 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.439174 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:16Z","lastTransitionTime":"2025-12-04T09:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.541863 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.541905 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.541914 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.541929 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.541940 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:16Z","lastTransitionTime":"2025-12-04T09:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.644311 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.644383 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.644393 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.644407 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.644416 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:16Z","lastTransitionTime":"2025-12-04T09:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.746445 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.746492 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.746512 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.746528 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.746540 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:16Z","lastTransitionTime":"2025-12-04T09:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.844443 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.844538 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:16 crc kubenswrapper[4707]: E1204 09:39:16.844586 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:16 crc kubenswrapper[4707]: E1204 09:39:16.844754 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.848708 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.848748 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.848759 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.848775 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.848787 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:16Z","lastTransitionTime":"2025-12-04T09:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.951087 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.951140 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.951153 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.951170 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:16 crc kubenswrapper[4707]: I1204 09:39:16.951182 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:16Z","lastTransitionTime":"2025-12-04T09:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.054307 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.054380 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.054392 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.054409 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.054421 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:17Z","lastTransitionTime":"2025-12-04T09:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.157245 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.157290 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.157300 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.157315 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.157325 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:17Z","lastTransitionTime":"2025-12-04T09:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.259988 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.260033 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.260044 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.260060 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.260073 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:17Z","lastTransitionTime":"2025-12-04T09:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.363417 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.363475 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.363493 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.363513 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.363526 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:17Z","lastTransitionTime":"2025-12-04T09:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.465951 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.465992 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.466001 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.466013 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.466023 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:17Z","lastTransitionTime":"2025-12-04T09:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.568803 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.568837 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.568849 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.568865 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.568878 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:17Z","lastTransitionTime":"2025-12-04T09:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.671806 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.671873 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.671891 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.671964 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.671988 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:17Z","lastTransitionTime":"2025-12-04T09:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.774427 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.774456 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.774464 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.774477 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.774486 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:17Z","lastTransitionTime":"2025-12-04T09:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.844853 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.844883 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:17 crc kubenswrapper[4707]: E1204 09:39:17.845076 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:17 crc kubenswrapper[4707]: E1204 09:39:17.845141 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.877463 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.877529 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.877552 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.877578 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.877600 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:17Z","lastTransitionTime":"2025-12-04T09:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.980293 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.980414 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.980435 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.980461 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:17 crc kubenswrapper[4707]: I1204 09:39:17.980478 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:17Z","lastTransitionTime":"2025-12-04T09:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.083932 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.084005 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.084020 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.084046 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.084059 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:18Z","lastTransitionTime":"2025-12-04T09:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.187103 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.187158 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.187168 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.187191 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.187203 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:18Z","lastTransitionTime":"2025-12-04T09:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.289843 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.289907 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.289919 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.289939 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.289955 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:18Z","lastTransitionTime":"2025-12-04T09:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.393703 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.393752 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.393763 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.393783 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.393798 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:18Z","lastTransitionTime":"2025-12-04T09:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.497394 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.497436 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.497447 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.497465 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.497476 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:18Z","lastTransitionTime":"2025-12-04T09:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.601260 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.601356 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.601372 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.601397 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.601411 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:18Z","lastTransitionTime":"2025-12-04T09:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.704670 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.704722 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.704736 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.704755 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.704768 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:18Z","lastTransitionTime":"2025-12-04T09:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.807665 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.807717 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.807730 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.807748 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.807763 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:18Z","lastTransitionTime":"2025-12-04T09:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.844091 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:18 crc kubenswrapper[4707]: E1204 09:39:18.844278 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.844444 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:18 crc kubenswrapper[4707]: E1204 09:39:18.844656 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.910844 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.911209 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.911400 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.911691 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:18 crc kubenswrapper[4707]: I1204 09:39:18.911847 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:18Z","lastTransitionTime":"2025-12-04T09:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.015053 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.015448 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.015737 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.015947 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.016132 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:19Z","lastTransitionTime":"2025-12-04T09:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.118968 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.119034 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.119055 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.119081 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.119106 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:19Z","lastTransitionTime":"2025-12-04T09:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.222453 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.222496 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.222509 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.222526 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.222541 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:19Z","lastTransitionTime":"2025-12-04T09:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.324155 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.324203 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.324216 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.324232 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.324248 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:19Z","lastTransitionTime":"2025-12-04T09:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.427315 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.427753 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.427952 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.428172 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.428312 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:19Z","lastTransitionTime":"2025-12-04T09:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.531122 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.531402 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.531551 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.531673 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.531845 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:19Z","lastTransitionTime":"2025-12-04T09:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.634717 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.634761 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.634770 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.634784 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.634794 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:19Z","lastTransitionTime":"2025-12-04T09:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.737437 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.737483 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.737493 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.737507 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.737526 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:19Z","lastTransitionTime":"2025-12-04T09:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.839706 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.839744 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.839753 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.839766 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.839775 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:19Z","lastTransitionTime":"2025-12-04T09:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.844057 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.844115 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:19 crc kubenswrapper[4707]: E1204 09:39:19.844160 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:19 crc kubenswrapper[4707]: E1204 09:39:19.844288 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.943037 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.943093 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.943112 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.943135 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:19 crc kubenswrapper[4707]: I1204 09:39:19.943159 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:19Z","lastTransitionTime":"2025-12-04T09:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.046277 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.046327 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.046364 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.046389 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.046408 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:20Z","lastTransitionTime":"2025-12-04T09:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.149514 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.149584 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.149604 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.149628 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.149646 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:20Z","lastTransitionTime":"2025-12-04T09:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.252720 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.252774 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.252782 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.252796 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.252823 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:20Z","lastTransitionTime":"2025-12-04T09:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.355963 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.356016 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.356035 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.356057 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.356071 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:20Z","lastTransitionTime":"2025-12-04T09:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.458829 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.458886 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.458900 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.458923 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.458938 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:20Z","lastTransitionTime":"2025-12-04T09:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.562661 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.562729 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.562752 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.562784 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.562807 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:20Z","lastTransitionTime":"2025-12-04T09:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.665727 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.665828 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.665843 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.665870 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.665886 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:20Z","lastTransitionTime":"2025-12-04T09:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.768486 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.768562 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.768587 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.768619 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.768647 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:20Z","lastTransitionTime":"2025-12-04T09:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.843959 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:20 crc kubenswrapper[4707]: E1204 09:39:20.844120 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.844178 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:20 crc kubenswrapper[4707]: E1204 09:39:20.844396 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.862996 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:20Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.871184 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.871257 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.871273 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.871287 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.871298 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:20Z","lastTransitionTime":"2025-12-04T09:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.880458 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mount
Path\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:20Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.897813 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:20Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.914559 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:20Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.932824 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:20Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.951107 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:20Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.969066 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:20Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.973587 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.973651 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.973696 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.973723 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.973742 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:20Z","lastTransitionTime":"2025-12-04T09:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:20 crc kubenswrapper[4707]: I1204 09:39:20.982784 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:20Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.010237 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:21Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.025811 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:21Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.039803 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:21Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.056246 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:05Z\\\",\\\"message\\\":\\\" 6355 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.810563 6355 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810603 6355 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810780 6355 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.811126 6355 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.811596 6355 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 09:39:05.811629 6355 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 09:39:05.811689 6355 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 09:39:05.811705 6355 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:39:05.811731 6355 factory.go:656] Stopping watch factory\\\\nI1204 09:39:05.811764 6355 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:39:05.811784 6355 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:21Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.070523 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5682ec3-78a9-47a8-a2bd-e4e58e4a5711\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b948fa6cd68a641df1c78251110c4e49e944be9affe87715c5b80be44f60ca00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e160007086212e3c54a49bdfcd7a43776b81c7561b18d963f0667181e2238c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ac30713fd3273b83ee133e02d9575ce884e1732adf4b612d134c208fd9cbf0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:21Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.078792 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.078842 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.078858 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.078881 4707 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.078897 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:21Z","lastTransitionTime":"2025-12-04T09:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.087909 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:21Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.102651 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:21Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.116321 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:21Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 
09:39:21.133738 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:21Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.149235 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:21Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.181368 4707 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.181456 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.181507 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.181532 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.181549 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:21Z","lastTransitionTime":"2025-12-04T09:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.283952 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.284012 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.284029 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.284053 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.284070 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:21Z","lastTransitionTime":"2025-12-04T09:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.387355 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.387412 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.387446 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.387495 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.387517 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:21Z","lastTransitionTime":"2025-12-04T09:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.489823 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.489895 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.489924 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.489955 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.489985 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:21Z","lastTransitionTime":"2025-12-04T09:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.592604 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.592645 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.592656 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.592672 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.592684 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:21Z","lastTransitionTime":"2025-12-04T09:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.695972 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.696321 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.696507 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.696650 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.696771 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:21Z","lastTransitionTime":"2025-12-04T09:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.800267 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.800679 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.800856 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.801048 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.801246 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:21Z","lastTransitionTime":"2025-12-04T09:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.843923 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:21 crc kubenswrapper[4707]: E1204 09:39:21.844480 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.844327 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:21 crc kubenswrapper[4707]: E1204 09:39:21.844785 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.904501 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.905131 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.905226 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.905377 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:21 crc kubenswrapper[4707]: I1204 09:39:21.905639 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:21Z","lastTransitionTime":"2025-12-04T09:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.008446 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.008723 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.008826 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.008894 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.008961 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:22Z","lastTransitionTime":"2025-12-04T09:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.111667 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.111993 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.112074 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.112147 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.112213 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:22Z","lastTransitionTime":"2025-12-04T09:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.214648 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.214675 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.214683 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.214697 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.214705 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:22Z","lastTransitionTime":"2025-12-04T09:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.317324 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.317378 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.317387 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.317400 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.317410 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:22Z","lastTransitionTime":"2025-12-04T09:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.420985 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.421050 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.421070 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.421094 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.421113 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:22Z","lastTransitionTime":"2025-12-04T09:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.523857 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.523917 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.523935 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.523959 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.523991 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:22Z","lastTransitionTime":"2025-12-04T09:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.627244 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.627292 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.627302 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.627317 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.627328 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:22Z","lastTransitionTime":"2025-12-04T09:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.739077 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.739212 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.739230 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.739248 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.739293 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:22Z","lastTransitionTime":"2025-12-04T09:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.845069 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.845221 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:22 crc kubenswrapper[4707]: E1204 09:39:22.845791 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:22 crc kubenswrapper[4707]: E1204 09:39:22.846037 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.846599 4707 scope.go:117] "RemoveContainer" containerID="dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.848224 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.848251 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.848261 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.848276 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.848286 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:22Z","lastTransitionTime":"2025-12-04T09:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:22 crc kubenswrapper[4707]: E1204 09:39:22.852189 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.951088 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.951227 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.951257 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.951291 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:22 crc kubenswrapper[4707]: I1204 09:39:22.951315 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:22Z","lastTransitionTime":"2025-12-04T09:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.053442 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.053513 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.053533 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.053558 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.053581 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.156601 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.156678 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.156696 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.156720 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.156738 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.261164 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.261819 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.261917 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.262008 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.262106 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.364285 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.364614 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.364939 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.365042 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.365127 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.467799 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.468100 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.468182 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.468267 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.468369 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.570945 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.570985 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.570997 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.571013 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.571025 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.673594 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.673651 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.673668 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.673693 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.673711 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.776553 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.776636 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.776659 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.776689 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.776710 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.844416 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.844556 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:23 crc kubenswrapper[4707]: E1204 09:39:23.844655 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:23 crc kubenswrapper[4707]: E1204 09:39:23.844860 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.879284 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.879321 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.879330 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.879365 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.879376 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.887977 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.888024 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.888039 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.888060 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.888077 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: E1204 09:39:23.904634 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:23Z is after 
2025-08-24T17:21:41Z" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.910076 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.910121 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.910137 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.910154 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.910165 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: E1204 09:39:23.928126 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:23Z is after 
2025-08-24T17:21:41Z" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.933039 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.933075 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.933088 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.933110 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.933125 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: E1204 09:39:23.952157 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:23Z is after 
2025-08-24T17:21:41Z" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.955978 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.956018 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.956032 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.956047 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.956059 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: E1204 09:39:23.972383 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:23Z is after 
2025-08-24T17:21:41Z" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.978044 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.978139 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.978160 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.978193 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.978213 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:23 crc kubenswrapper[4707]: E1204 09:39:23.995804 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:23Z is after 
2025-08-24T17:21:41Z" Dec 04 09:39:23 crc kubenswrapper[4707]: E1204 09:39:23.995991 4707 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.998073 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.998127 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.998141 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.998164 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:23 crc kubenswrapper[4707]: I1204 09:39:23.998179 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:23Z","lastTransitionTime":"2025-12-04T09:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.101034 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.101083 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.101093 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.101107 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.101116 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:24Z","lastTransitionTime":"2025-12-04T09:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.203238 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.203293 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.203311 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.203363 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.203383 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:24Z","lastTransitionTime":"2025-12-04T09:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.306294 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.306360 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.306371 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.306390 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.306405 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:24Z","lastTransitionTime":"2025-12-04T09:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.408862 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.408902 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.408913 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.408927 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.408938 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:24Z","lastTransitionTime":"2025-12-04T09:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.513165 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.513203 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.513215 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.513232 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.513248 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:24Z","lastTransitionTime":"2025-12-04T09:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.616530 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.616586 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.616597 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.616617 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.616630 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:24Z","lastTransitionTime":"2025-12-04T09:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.719293 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.719358 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.719371 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.719392 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.719403 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:24Z","lastTransitionTime":"2025-12-04T09:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.822415 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.822473 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.822485 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.822505 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.822522 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:24Z","lastTransitionTime":"2025-12-04T09:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.843968 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.843987 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:24 crc kubenswrapper[4707]: E1204 09:39:24.844091 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:24 crc kubenswrapper[4707]: E1204 09:39:24.844175 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.925280 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.925318 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.925330 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.925363 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:24 crc kubenswrapper[4707]: I1204 09:39:24.925375 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:24Z","lastTransitionTime":"2025-12-04T09:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.028183 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.028222 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.028233 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.028248 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.028262 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:25Z","lastTransitionTime":"2025-12-04T09:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.129952 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.129984 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.129995 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.130010 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.130019 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:25Z","lastTransitionTime":"2025-12-04T09:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.233503 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.233580 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.233599 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.233623 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.233641 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:25Z","lastTransitionTime":"2025-12-04T09:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.336028 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.336069 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.336078 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.336090 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.336101 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:25Z","lastTransitionTime":"2025-12-04T09:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.438630 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.438669 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.438682 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.438702 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.438719 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:25Z","lastTransitionTime":"2025-12-04T09:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.541813 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.541860 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.541872 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.541889 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.541901 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:25Z","lastTransitionTime":"2025-12-04T09:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.644834 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.644884 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.644898 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.644914 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.644926 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:25Z","lastTransitionTime":"2025-12-04T09:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.748198 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.748272 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.748294 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.748316 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.748550 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:25Z","lastTransitionTime":"2025-12-04T09:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.844953 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:25 crc kubenswrapper[4707]: E1204 09:39:25.845128 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.844986 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:25 crc kubenswrapper[4707]: E1204 09:39:25.845239 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.851141 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.851195 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.851209 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.851230 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.851242 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:25Z","lastTransitionTime":"2025-12-04T09:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.954293 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.954447 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.954484 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.954514 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:25 crc kubenswrapper[4707]: I1204 09:39:25.954536 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:25Z","lastTransitionTime":"2025-12-04T09:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.058528 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.058595 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.058615 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.058648 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.058670 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:26Z","lastTransitionTime":"2025-12-04T09:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.161612 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.161644 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.161655 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.161668 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.161677 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:26Z","lastTransitionTime":"2025-12-04T09:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.264546 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.264624 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.264650 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.264696 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.264733 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:26Z","lastTransitionTime":"2025-12-04T09:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.368045 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.368086 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.368094 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.368110 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.368124 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:26Z","lastTransitionTime":"2025-12-04T09:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.471076 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.471139 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.471149 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.471165 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.471179 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:26Z","lastTransitionTime":"2025-12-04T09:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.573620 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.573694 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.573721 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.573748 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.573767 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:26Z","lastTransitionTime":"2025-12-04T09:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.676930 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.676974 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.676991 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.677012 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.677029 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:26Z","lastTransitionTime":"2025-12-04T09:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.779751 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.779787 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.779799 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.779815 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.779827 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:26Z","lastTransitionTime":"2025-12-04T09:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.844654 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.844705 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:26 crc kubenswrapper[4707]: E1204 09:39:26.844801 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:26 crc kubenswrapper[4707]: E1204 09:39:26.845470 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.883283 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.883349 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.883361 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.883377 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.883389 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:26Z","lastTransitionTime":"2025-12-04T09:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.985761 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.985797 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.985806 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.985854 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:26 crc kubenswrapper[4707]: I1204 09:39:26.985865 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:26Z","lastTransitionTime":"2025-12-04T09:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.089202 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.089252 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.089267 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.089289 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.089302 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:27Z","lastTransitionTime":"2025-12-04T09:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.191681 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.191809 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.191843 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.191873 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.191895 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:27Z","lastTransitionTime":"2025-12-04T09:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.295234 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.295284 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.295302 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.295327 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.295373 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:27Z","lastTransitionTime":"2025-12-04T09:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.342828 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:27 crc kubenswrapper[4707]: E1204 09:39:27.343062 4707 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:39:27 crc kubenswrapper[4707]: E1204 09:39:27.343461 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs podName:9a8009fd-d652-44fb-8ef1-73078262e8fa nodeName:}" failed. No retries permitted until 2025-12-04 09:39:59.343125657 +0000 UTC m=+98.778948194 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs") pod "network-metrics-daemon-txkn2" (UID: "9a8009fd-d652-44fb-8ef1-73078262e8fa") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.399304 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.399396 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.399417 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.399442 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.399463 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:27Z","lastTransitionTime":"2025-12-04T09:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.503555 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.503592 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.503604 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.503620 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.503632 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:27Z","lastTransitionTime":"2025-12-04T09:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.606404 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.606454 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.606470 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.606489 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.606501 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:27Z","lastTransitionTime":"2025-12-04T09:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.708630 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.708667 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.708677 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.708689 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.708700 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:27Z","lastTransitionTime":"2025-12-04T09:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.811200 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.811442 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.811565 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.811659 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.811744 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:27Z","lastTransitionTime":"2025-12-04T09:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.844459 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.844544 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:27 crc kubenswrapper[4707]: E1204 09:39:27.844648 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:27 crc kubenswrapper[4707]: E1204 09:39:27.844824 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.913552 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.913594 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.913604 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.913620 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:27 crc kubenswrapper[4707]: I1204 09:39:27.913632 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:27Z","lastTransitionTime":"2025-12-04T09:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.016624 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.016886 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.017005 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.017075 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.017145 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:28Z","lastTransitionTime":"2025-12-04T09:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.120525 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.120778 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.120910 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.121029 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.121134 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:28Z","lastTransitionTime":"2025-12-04T09:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.224978 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.225494 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.225582 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.225677 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.225779 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:28Z","lastTransitionTime":"2025-12-04T09:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.329541 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.329608 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.329627 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.329652 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.329670 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:28Z","lastTransitionTime":"2025-12-04T09:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.431882 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.432664 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.432759 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.432845 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.432934 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:28Z","lastTransitionTime":"2025-12-04T09:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.535477 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.535549 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.535593 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.535628 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.535647 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:28Z","lastTransitionTime":"2025-12-04T09:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.638256 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.638577 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.638655 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.638754 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.638874 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:28Z","lastTransitionTime":"2025-12-04T09:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.741978 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.742043 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.742060 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.742083 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.742098 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:28Z","lastTransitionTime":"2025-12-04T09:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.778038 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-npc85_e9d3467a-1f4a-4d54-97b3-c7fd062eff13/kube-multus/0.log" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.778108 4707 generic.go:334] "Generic (PLEG): container finished" podID="e9d3467a-1f4a-4d54-97b3-c7fd062eff13" containerID="04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b" exitCode=1 Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.778161 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-npc85" event={"ID":"e9d3467a-1f4a-4d54-97b3-c7fd062eff13","Type":"ContainerDied","Data":"04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.778709 4707 scope.go:117] "RemoveContainer" containerID="04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.790374 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.805624 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5682ec3-78a9-47a8-a2bd-e4e58e4a5711\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b948fa6cd68a641df1c78251110c4e49e944be9affe87715c5b80be44f60ca00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e160007086212e3c54a49bdfcd7a43776b81c7561b18d963f0667181e2238c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ac30713fd3273b83ee133e02d9575ce884e1732adf4b612d134c208fd9cbf0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.820228 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.836075 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.843895 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:28 crc kubenswrapper[4707]: E1204 09:39:28.843986 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.844258 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.844293 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.844303 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.844317 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.844327 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:28Z","lastTransitionTime":"2025-12-04T09:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.844903 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:28 crc kubenswrapper[4707]: E1204 09:39:28.845018 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.848798 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.862260 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 
09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.874140 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.884949 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.898198 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:28Z\\\",\\\"message\\\":\\\"2025-12-04T09:38:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000\\\\n2025-12-04T09:38:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000 to /host/opt/cni/bin/\\\\n2025-12-04T09:38:43Z [verbose] multus-daemon started\\\\n2025-12-04T09:38:43Z [verbose] Readiness Indicator file check\\\\n2025-12-04T09:39:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.912882 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.926151 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.937690 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.946176 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.946212 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.946225 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.946241 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.946253 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:28Z","lastTransitionTime":"2025-12-04T09:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.947222 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.959313 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.981087 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8
aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:28 crc kubenswrapper[4707]: I1204 09:39:28.995325 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:28Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.012111 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.030037 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b2
68d92e196c3b32a5ab4b50fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:05Z\\\",\\\"message\\\":\\\" 6355 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.810563 6355 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810603 6355 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810780 6355 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.811126 6355 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.811596 6355 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 09:39:05.811629 6355 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 09:39:05.811689 6355 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 09:39:05.811705 6355 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:39:05.811731 6355 factory.go:656] Stopping watch factory\\\\nI1204 09:39:05.811764 6355 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:39:05.811784 6355 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.048754 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.048784 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.048795 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.048811 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.048823 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:29Z","lastTransitionTime":"2025-12-04T09:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.151288 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.151319 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.151328 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.151359 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.151371 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:29Z","lastTransitionTime":"2025-12-04T09:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.254414 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.254455 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.254464 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.254477 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.254487 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:29Z","lastTransitionTime":"2025-12-04T09:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.356512 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.356548 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.356556 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.356570 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.356580 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:29Z","lastTransitionTime":"2025-12-04T09:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.458912 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.458977 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.458991 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.459009 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.459021 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:29Z","lastTransitionTime":"2025-12-04T09:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.561796 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.561853 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.561869 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.561886 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.561899 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:29Z","lastTransitionTime":"2025-12-04T09:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.664044 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.664107 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.664125 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.664146 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.664163 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:29Z","lastTransitionTime":"2025-12-04T09:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.766522 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.766556 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.766568 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.766586 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.766597 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:29Z","lastTransitionTime":"2025-12-04T09:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.783076 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-npc85_e9d3467a-1f4a-4d54-97b3-c7fd062eff13/kube-multus/0.log" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.783134 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-npc85" event={"ID":"e9d3467a-1f4a-4d54-97b3-c7fd062eff13","Type":"ContainerStarted","Data":"861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95"} Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.797828 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:28Z\\\",\\\"message\\\":\\\"2025-12-04T09:38:43+00:00 [cnibincopy] 
Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000\\\\n2025-12-04T09:38:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000 to /host/opt/cni/bin/\\\\n2025-12-04T09:38:43Z [verbose] multus-daemon started\\\\n2025-12-04T09:38:43Z [verbose] Readiness Indicator file check\\\\n2025-12-04T09:39:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.811441 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.825151 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.838235 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.844562 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:29 crc kubenswrapper[4707]: E1204 09:39:29.844694 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.844783 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:29 crc kubenswrapper[4707]: E1204 09:39:29.844969 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.849495 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.859998 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.868358 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.868391 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.868401 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.868416 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.868428 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:29Z","lastTransitionTime":"2025-12-04T09:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.871740 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.884036 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.909259 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.923388 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.935385 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.955014 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:05Z\\\",\\\"message\\\":\\\" 6355 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.810563 6355 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810603 6355 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810780 6355 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.811126 6355 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.811596 6355 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 09:39:05.811629 6355 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 09:39:05.811689 6355 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 09:39:05.811705 6355 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:39:05.811731 6355 factory.go:656] Stopping watch factory\\\\nI1204 09:39:05.811764 6355 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:39:05.811784 6355 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.969229 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5682ec3-78a9-47a8-a2bd-e4e58e4a5711\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b948fa6cd68a641df1c78251110c4e49e944be9affe87715c5b80be44f60ca00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e160007086212e3c54a49bdfcd7a43776b81c7561b18d963f0667181e2238c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ac30713fd3273b83ee133e02d9575ce884e1732adf4b612d134c208fd9cbf0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.970991 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.971051 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.971067 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.971091 4707 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.971108 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:29Z","lastTransitionTime":"2025-12-04T09:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:29 crc kubenswrapper[4707]: I1204 09:39:29.988222 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:29Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.008738 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.020932 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 
09:39:30.033707 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.045960 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.073586 4707 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.073664 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.073677 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.073726 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.073740 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:30Z","lastTransitionTime":"2025-12-04T09:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.176354 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.176393 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.176405 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.176420 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.176430 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:30Z","lastTransitionTime":"2025-12-04T09:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.278994 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.279047 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.279065 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.279085 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.279098 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:30Z","lastTransitionTime":"2025-12-04T09:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.382156 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.382207 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.382225 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.382249 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.382269 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:30Z","lastTransitionTime":"2025-12-04T09:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.485024 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.485074 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.485091 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.485115 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.485132 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:30Z","lastTransitionTime":"2025-12-04T09:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.588281 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.588392 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.588437 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.588468 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.588490 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:30Z","lastTransitionTime":"2025-12-04T09:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.691710 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.692046 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.692184 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.692316 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.692526 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:30Z","lastTransitionTime":"2025-12-04T09:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.794383 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.794434 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.794448 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.794467 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.794478 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:30Z","lastTransitionTime":"2025-12-04T09:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.844528 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.844537 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:30 crc kubenswrapper[4707]: E1204 09:39:30.845102 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:30 crc kubenswrapper[4707]: E1204 09:39:30.845108 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.861519 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e
6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.876443 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.896879 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.897168 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.897214 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.897225 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.897243 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.897254 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:30Z","lastTransitionTime":"2025-12-04T09:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.923802 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.938577 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.952051 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.965662 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:28Z\\\",\\\"message\\\":\\\"2025-12-04T09:38:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000\\\\n2025-12-04T09:38:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000 to /host/opt/cni/bin/\\\\n2025-12-04T09:38:43Z [verbose] multus-daemon started\\\\n2025-12-04T09:38:43Z [verbose] Readiness Indicator file check\\\\n2025-12-04T09:39:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.975365 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.988049 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.999527 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.999567 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.999577 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.999593 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:30 crc kubenswrapper[4707]: I1204 09:39:30.999603 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:30Z","lastTransitionTime":"2025-12-04T09:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.000283 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:30Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.027841 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:05Z\\\",\\\"message\\\":\\\" 6355 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.810563 6355 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810603 6355 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810780 6355 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.811126 6355 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.811596 6355 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 09:39:05.811629 6355 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 09:39:05.811689 6355 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 09:39:05.811705 6355 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:39:05.811731 6355 factory.go:656] Stopping watch factory\\\\nI1204 09:39:05.811764 6355 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:39:05.811784 6355 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:31Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.046115 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ea
d45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:31Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.061836 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:31Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.075969 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:31Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.086776 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:31Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.097596 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:31Z is after 2025-08-24T17:21:41Z" Dec 04 
09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.101258 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.101349 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.101364 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.101380 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.101392 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:31Z","lastTransitionTime":"2025-12-04T09:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.110443 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:31Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.124303 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5682ec3-78a9-47a8-a2bd-e4e58e4a5711\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b948fa6cd68a641df1c78251110c4e49e944be9affe87715c5b80be44f60ca00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e160007086212e3c54a49bdfcd7a43776b81c7561b18d963f0667181e2238c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ac30713fd3273b83ee133e02d9575ce884e1732adf4b612d134c208fd9cbf0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:31Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.203028 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.203056 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.203063 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.203076 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.203086 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:31Z","lastTransitionTime":"2025-12-04T09:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.305638 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.305687 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.305703 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.305719 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.306122 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:31Z","lastTransitionTime":"2025-12-04T09:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.408201 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.408246 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.408264 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.408288 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.408307 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:31Z","lastTransitionTime":"2025-12-04T09:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.511937 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.511989 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.512002 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.512019 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.512038 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:31Z","lastTransitionTime":"2025-12-04T09:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.614512 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.614575 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.614584 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.614598 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.614607 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:31Z","lastTransitionTime":"2025-12-04T09:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.717316 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.717362 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.717372 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.717385 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.717394 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:31Z","lastTransitionTime":"2025-12-04T09:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.820647 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.820713 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.820731 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.820759 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.820777 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:31Z","lastTransitionTime":"2025-12-04T09:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.844050 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.844080 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:31 crc kubenswrapper[4707]: E1204 09:39:31.844395 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:31 crc kubenswrapper[4707]: E1204 09:39:31.844210 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.923865 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.923921 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.923953 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.923978 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:31 crc kubenswrapper[4707]: I1204 09:39:31.923995 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:31Z","lastTransitionTime":"2025-12-04T09:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.026560 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.026610 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.026622 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.026641 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.026653 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:32Z","lastTransitionTime":"2025-12-04T09:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.130788 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.130857 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.130872 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.130891 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.130903 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:32Z","lastTransitionTime":"2025-12-04T09:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.234001 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.234437 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.234615 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.234794 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.234953 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:32Z","lastTransitionTime":"2025-12-04T09:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.337090 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.337318 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.337400 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.337491 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.337566 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:32Z","lastTransitionTime":"2025-12-04T09:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.440569 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.440818 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.440884 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.440953 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.441015 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:32Z","lastTransitionTime":"2025-12-04T09:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.543751 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.543813 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.543831 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.543860 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.543877 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:32Z","lastTransitionTime":"2025-12-04T09:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.646580 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.646625 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.646638 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.646653 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.646664 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:32Z","lastTransitionTime":"2025-12-04T09:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.748793 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.748885 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.748904 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.748918 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.748929 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:32Z","lastTransitionTime":"2025-12-04T09:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.844654 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:32 crc kubenswrapper[4707]: E1204 09:39:32.844822 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.844673 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:32 crc kubenswrapper[4707]: E1204 09:39:32.845204 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.851395 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.851432 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.851441 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.851455 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.851465 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:32Z","lastTransitionTime":"2025-12-04T09:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.953502 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.953539 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.953549 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.953564 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:32 crc kubenswrapper[4707]: I1204 09:39:32.953575 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:32Z","lastTransitionTime":"2025-12-04T09:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.055864 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.055908 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.055925 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.055947 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.055965 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:33Z","lastTransitionTime":"2025-12-04T09:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.159264 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.159323 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.159360 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.159378 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.159391 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:33Z","lastTransitionTime":"2025-12-04T09:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.262034 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.262323 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.262432 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.262526 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.262604 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:33Z","lastTransitionTime":"2025-12-04T09:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.365428 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.365485 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.365502 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.365530 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.365561 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:33Z","lastTransitionTime":"2025-12-04T09:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.467402 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.467441 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.467454 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.467469 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.467480 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:33Z","lastTransitionTime":"2025-12-04T09:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.569965 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.570476 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.570543 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.570613 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.570678 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:33Z","lastTransitionTime":"2025-12-04T09:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.672890 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.672929 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.672940 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.672955 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.672967 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:33Z","lastTransitionTime":"2025-12-04T09:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.775192 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.775228 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.775237 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.775250 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.775260 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:33Z","lastTransitionTime":"2025-12-04T09:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.844218 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.844250 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:33 crc kubenswrapper[4707]: E1204 09:39:33.844368 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:33 crc kubenswrapper[4707]: E1204 09:39:33.844535 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.877466 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.877523 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.877534 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.877550 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.877561 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:33Z","lastTransitionTime":"2025-12-04T09:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.979737 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.979790 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.979805 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.979825 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:33 crc kubenswrapper[4707]: I1204 09:39:33.979841 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:33Z","lastTransitionTime":"2025-12-04T09:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.082008 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.082046 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.082058 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.082075 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.082088 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.185072 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.185122 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.185132 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.185148 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.185159 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.275845 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.276223 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.276398 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.276569 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.276694 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: E1204 09:39:34.290214 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:34Z is after 
2025-08-24T17:21:41Z" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.294264 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.294302 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.294313 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.294348 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.294360 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: E1204 09:39:34.306434 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:34Z is after 
2025-08-24T17:21:41Z" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.309847 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.309893 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.309905 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.309920 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.309932 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: E1204 09:39:34.324697 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:34Z is after 
2025-08-24T17:21:41Z" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.329244 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.329309 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.329322 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.329362 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.329374 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: E1204 09:39:34.341738 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:34Z is after 
2025-08-24T17:21:41Z" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.346074 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.346107 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.346114 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.346128 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.346137 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: E1204 09:39:34.361778 4707 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"3ceb1816-cf72-4648-bec6-4ad3a2135d2a\\\",\\\"systemUUID\\\":\\\"eae3f82d-6a5b-493a-a51e-c8ee4e7acba2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:34Z is after 
2025-08-24T17:21:41Z" Dec 04 09:39:34 crc kubenswrapper[4707]: E1204 09:39:34.361960 4707 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.363720 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.363852 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.363929 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.364002 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.364064 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.466072 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.466115 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.466127 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.466146 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.466158 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.569731 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.569779 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.569791 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.569810 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.569823 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.672602 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.672637 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.672645 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.672657 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.672666 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.774804 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.774852 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.774863 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.774879 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.774888 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.844133 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.844261 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:34 crc kubenswrapper[4707]: E1204 09:39:34.844454 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:34 crc kubenswrapper[4707]: E1204 09:39:34.844584 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.877996 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.878271 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.878371 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.878457 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.878555 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.981960 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.982033 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.982050 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.982075 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:34 crc kubenswrapper[4707]: I1204 09:39:34.982096 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:34Z","lastTransitionTime":"2025-12-04T09:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.085648 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.085954 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.086107 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.086211 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.086296 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:35Z","lastTransitionTime":"2025-12-04T09:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.188756 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.188816 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.188827 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.188842 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.188852 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:35Z","lastTransitionTime":"2025-12-04T09:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.291369 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.291420 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.291432 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.291448 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.291459 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:35Z","lastTransitionTime":"2025-12-04T09:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.394771 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.394822 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.394839 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.394866 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.394883 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:35Z","lastTransitionTime":"2025-12-04T09:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.496875 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.496926 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.496940 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.496956 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.496967 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:35Z","lastTransitionTime":"2025-12-04T09:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.599409 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.599470 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.599481 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.599496 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.599508 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:35Z","lastTransitionTime":"2025-12-04T09:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.702181 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.702235 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.702250 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.702267 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.702278 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:35Z","lastTransitionTime":"2025-12-04T09:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.805477 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.805512 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.805522 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.805541 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.805554 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:35Z","lastTransitionTime":"2025-12-04T09:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.844170 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.844259 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:35 crc kubenswrapper[4707]: E1204 09:39:35.844332 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:35 crc kubenswrapper[4707]: E1204 09:39:35.844680 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.908166 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.908240 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.908267 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.908299 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:35 crc kubenswrapper[4707]: I1204 09:39:35.908323 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:35Z","lastTransitionTime":"2025-12-04T09:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.010851 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.010888 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.010896 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.010908 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.010916 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:36Z","lastTransitionTime":"2025-12-04T09:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.114280 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.114400 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.114483 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.114513 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.114534 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:36Z","lastTransitionTime":"2025-12-04T09:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.217793 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.217915 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.217935 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.217967 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.217987 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:36Z","lastTransitionTime":"2025-12-04T09:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.320575 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.320636 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.320652 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.320673 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.320688 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:36Z","lastTransitionTime":"2025-12-04T09:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.423212 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.423364 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.423382 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.423407 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.423421 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:36Z","lastTransitionTime":"2025-12-04T09:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.526118 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.526178 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.526191 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.526214 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.526622 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:36Z","lastTransitionTime":"2025-12-04T09:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.629597 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.629654 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.629666 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.629680 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.629691 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:36Z","lastTransitionTime":"2025-12-04T09:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.732237 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.732303 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.732320 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.732406 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.732442 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:36Z","lastTransitionTime":"2025-12-04T09:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.835602 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.835685 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.835710 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.835739 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.835763 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:36Z","lastTransitionTime":"2025-12-04T09:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.844301 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.844376 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:36 crc kubenswrapper[4707]: E1204 09:39:36.844546 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:36 crc kubenswrapper[4707]: E1204 09:39:36.844794 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.939378 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.939464 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.939487 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.939515 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:36 crc kubenswrapper[4707]: I1204 09:39:36.939534 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:36Z","lastTransitionTime":"2025-12-04T09:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.042900 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.042975 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.042989 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.043017 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.043033 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:37Z","lastTransitionTime":"2025-12-04T09:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.145487 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.145556 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.145574 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.145607 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.145630 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:37Z","lastTransitionTime":"2025-12-04T09:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.248456 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.248484 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.248493 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.248505 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.248514 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:37Z","lastTransitionTime":"2025-12-04T09:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.351306 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.351404 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.351424 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.351448 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.351472 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:37Z","lastTransitionTime":"2025-12-04T09:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.453261 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.453510 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.453522 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.453536 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.453545 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:37Z","lastTransitionTime":"2025-12-04T09:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.556941 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.556992 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.557007 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.557028 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.557041 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:37Z","lastTransitionTime":"2025-12-04T09:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.659675 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.659723 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.659733 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.659748 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.659760 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:37Z","lastTransitionTime":"2025-12-04T09:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.762246 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.762303 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.762312 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.762331 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.762390 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:37Z","lastTransitionTime":"2025-12-04T09:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.844866 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.844948 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:37 crc kubenswrapper[4707]: E1204 09:39:37.845160 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:37 crc kubenswrapper[4707]: E1204 09:39:37.845271 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.846066 4707 scope.go:117] "RemoveContainer" containerID="dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.865070 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.865135 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.865147 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.865163 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.865175 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:37Z","lastTransitionTime":"2025-12-04T09:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.967819 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.968191 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.968204 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.968220 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:37 crc kubenswrapper[4707]: I1204 09:39:37.968234 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:37Z","lastTransitionTime":"2025-12-04T09:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.071236 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.071278 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.071289 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.071304 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.071315 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:38Z","lastTransitionTime":"2025-12-04T09:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.173613 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.173679 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.173691 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.173707 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.173719 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:38Z","lastTransitionTime":"2025-12-04T09:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.276838 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.276876 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.276886 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.276899 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.276908 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:38Z","lastTransitionTime":"2025-12-04T09:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.380081 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.380159 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.380185 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.380215 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.380237 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:38Z","lastTransitionTime":"2025-12-04T09:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.482946 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.482996 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.483007 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.483020 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.483033 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:38Z","lastTransitionTime":"2025-12-04T09:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.586066 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.586142 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.586152 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.586172 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.586199 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:38Z","lastTransitionTime":"2025-12-04T09:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.689540 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.689595 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.689607 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.689624 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.689637 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:38Z","lastTransitionTime":"2025-12-04T09:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.791355 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.791393 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.791403 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.791417 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.791428 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:38Z","lastTransitionTime":"2025-12-04T09:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.838095 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/2.log" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.844431 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.844521 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:38 crc kubenswrapper[4707]: E1204 09:39:38.844639 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:38 crc kubenswrapper[4707]: E1204 09:39:38.844716 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.848607 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025"} Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.849032 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.865178 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:38Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.882613 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1504ef648166c0519bc6355146c84b296aea9182
a894802aa5c05e9bcfd4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:05Z\\\",\\\"message\\\":\\\" 6355 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.810563 6355 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810603 6355 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810780 6355 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.811126 6355 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.811596 6355 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 09:39:05.811629 6355 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 09:39:05.811689 6355 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 09:39:05.811705 6355 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:39:05.811731 6355 factory.go:656] Stopping watch factory\\\\nI1204 09:39:05.811764 6355 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:39:05.811784 6355 ovnkube.go:599] Stopped 
ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\
\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:38Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.894358 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.894396 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.894406 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.894419 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.894429 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:38Z","lastTransitionTime":"2025-12-04T09:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.938802 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:38Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.952665 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:38Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.966652 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:38Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.978061 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:38Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.991024 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:38Z is after 2025-08-24T17:21:41Z" Dec 04 
09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.997256 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.997326 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.997365 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.997384 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:38 crc kubenswrapper[4707]: I1204 09:39:38.997396 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:38Z","lastTransitionTime":"2025-12-04T09:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.002629 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.015961 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5682ec3-78a9-47a8-a2bd-e4e58e4a5711\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b948fa6cd68a641df1c78251110c4e49e944be9affe87715c5b80be44f60ca00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e160007086212e3c54a49bdfcd7a43776b81c7561b18d963f0667181e2238c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ac30713fd3273b83ee133e02d9575ce884e1732adf4b612d134c208fd9cbf0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.028179 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.043168 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.056378 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\
\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.067024 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.076247 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.088630 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.099578 4707 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.099630 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.099642 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.099663 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.099678 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:39Z","lastTransitionTime":"2025-12-04T09:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.100738 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:28Z\\\",\\\"message\\\":\\\"2025-12-04T09:38:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000\\\\n2025-12-04T09:38:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000 to /host/opt/cni/bin/\\\\n2025-12-04T09:38:43Z [verbose] multus-daemon started\\\\n2025-12-04T09:38:43Z [verbose] Readiness Indicator file check\\\\n2025-12-04T09:39:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.114023 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.130232 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.202621 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.202870 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.202994 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.203066 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.203127 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:39Z","lastTransitionTime":"2025-12-04T09:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.306513 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.306760 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.306841 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.306935 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.307026 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:39Z","lastTransitionTime":"2025-12-04T09:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.408862 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.408952 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.408973 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.409004 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.409027 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:39Z","lastTransitionTime":"2025-12-04T09:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.511433 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.511463 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.511471 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.511484 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.511492 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:39Z","lastTransitionTime":"2025-12-04T09:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.614055 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.614099 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.614110 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.614126 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.614138 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:39Z","lastTransitionTime":"2025-12-04T09:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.716459 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.717135 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.717286 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.717567 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.717667 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:39Z","lastTransitionTime":"2025-12-04T09:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.821577 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.821875 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.822084 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.822307 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.822425 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:39Z","lastTransitionTime":"2025-12-04T09:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.844154 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.844203 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:39 crc kubenswrapper[4707]: E1204 09:39:39.844555 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:39 crc kubenswrapper[4707]: E1204 09:39:39.844703 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.851558 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/3.log" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.852117 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/2.log" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.856924 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" exitCode=1 Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.856975 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025"} Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.857374 4707 scope.go:117] "RemoveContainer" containerID="dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.858165 4707 scope.go:117] "RemoveContainer" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" Dec 04 09:39:39 crc kubenswrapper[4707]: E1204 09:39:39.858629 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.890899 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.909596 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.926157 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.926196 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.926209 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.926228 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.926240 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:39Z","lastTransitionTime":"2025-12-04T09:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.930492 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.957262 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:05Z\\\",\\\"message\\\":\\\" 6355 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.810563 6355 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810603 6355 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810780 6355 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.811126 6355 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.811596 6355 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 09:39:05.811629 6355 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 09:39:05.811689 6355 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 09:39:05.811705 6355 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:39:05.811731 6355 factory.go:656] Stopping watch factory\\\\nI1204 09:39:05.811764 6355 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:39:05.811784 6355 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:39Z\\\",\\\"message\\\":\\\"04 09:39:39.140028 6732 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140159 6732 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140261 6732 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140351 6732 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140417 6732 reflector.go:311] Stopping reflector 
*v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1204 09:39:39.140966 6732 factory.go:656] Stopping watch factory\\\\nI1204 09:39:39.140986 6732 ovnkube.go:599] Stopped ovnkube\\\\nI1204 09:39:39.141014 6732 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 09:39:39.141106 6732 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\
\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.972795 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 
09:39:39 crc kubenswrapper[4707]: I1204 09:39:39.989016 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:39Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.004976 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5682ec3-78a9-47a8-a2bd-e4e58e4a5711\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b948fa6cd68a641df1c78251110c4e49e944be9affe87715c5b80be44f60ca00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e160007086212e3c54a49bdfcd7a43776b81c7561b18d963f0667181e2238c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ac30713fd3273b83ee133e02d9575ce884e1732adf4b612d134c208fd9cbf0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.019667 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.032519 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.032781 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.032864 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.032978 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.033053 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:40Z","lastTransitionTime":"2025-12-04T09:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.035274 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.046997 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.062559 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.073903 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.088217 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.103146 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:28Z\\\",\\\"message\\\":\\\"2025-12-04T09:38:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000\\\\n2025-12-04T09:38:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000 to /host/opt/cni/bin/\\\\n2025-12-04T09:38:43Z [verbose] multus-daemon started\\\\n2025-12-04T09:38:43Z [verbose] Readiness Indicator file check\\\\n2025-12-04T09:39:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.120218 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.134474 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.136989 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.137048 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.137061 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.137079 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.137091 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:40Z","lastTransitionTime":"2025-12-04T09:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.150043 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.166398 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.239544 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.239607 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.239617 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.239630 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.239640 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:40Z","lastTransitionTime":"2025-12-04T09:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.342849 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.343223 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.343422 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.343570 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.343676 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:40Z","lastTransitionTime":"2025-12-04T09:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.446943 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.446985 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.446999 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.447018 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.447033 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:40Z","lastTransitionTime":"2025-12-04T09:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.549652 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.549686 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.549696 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.549712 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.549723 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:40Z","lastTransitionTime":"2025-12-04T09:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.652180 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.652264 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.652306 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.652328 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.652374 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:40Z","lastTransitionTime":"2025-12-04T09:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.755625 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.755689 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.755707 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.755741 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.755776 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:40Z","lastTransitionTime":"2025-12-04T09:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.844363 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.844447 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:40 crc kubenswrapper[4707]: E1204 09:39:40.844510 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:40 crc kubenswrapper[4707]: E1204 09:39:40.844617 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.858681 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.859442 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.859478 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.859500 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.859444 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:28Z\\\",\\\"message\\\":\\\"2025-12-04T09:38:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000\\\\n2025-12-04T09:38:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000 to /host/opt/cni/bin/\\\\n2025-12-04T09:38:43Z [verbose] multus-daemon started\\\\n2025-12-04T09:38:43Z [verbose] Readiness Indicator file check\\\\n2025-12-04T09:39:28Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.859515 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:40Z","lastTransitionTime":"2025-12-04T09:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.862560 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/3.log" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.865959 4707 scope.go:117] "RemoveContainer" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" Dec 04 09:39:40 crc kubenswrapper[4707]: E1204 09:39:40.866103 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.875636 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all 
endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.891888 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.907555 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.920065 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.930885 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.941255 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.951513 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.962104 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.962133 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.962141 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.962153 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.962163 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:40Z","lastTransitionTime":"2025-12-04T09:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.968000 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.978693 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:40 crc kubenswrapper[4707]: I1204 09:39:40.992109 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:40Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.009602 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd90a2625d125a4bbe089306910246cbf14d01b268d92e196c3b32a5ab4b50fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:05Z\\\",\\\"message\\\":\\\" 6355 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.810563 6355 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810603 6355 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.810780 6355 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 09:39:05.811126 6355 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:05.811596 6355 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 09:39:05.811629 6355 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 09:39:05.811689 6355 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 09:39:05.811705 6355 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1204 09:39:05.811731 6355 factory.go:656] Stopping watch factory\\\\nI1204 09:39:05.811764 6355 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 09:39:05.811784 6355 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:39Z\\\",\\\"message\\\":\\\"04 09:39:39.140028 6732 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140159 6732 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140261 6732 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140351 6732 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140417 6732 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1204 09:39:39.140966 6732 factory.go:656] Stopping watch factory\\\\nI1204 09:39:39.140986 6732 ovnkube.go:599] Stopped ovnkube\\\\nI1204 09:39:39.141014 6732 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 09:39:39.141106 6732 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.021665 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5682ec3-78a9-47a8-a2bd-e4e58e4a5711\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b948fa6cd68a641df1c78251110c4e49e944be9affe87715c5b80be44f60ca00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e160007086212e3c54a49bdfcd7a43776b81c7561b18d963f0667181e2238c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ac30713fd3273b83ee133e02d9575ce884e1732adf4b612d134c208fd9cbf0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.034908 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.047927 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.056943 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 
09:39:41.065770 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.067102 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.067135 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.067146 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.067161 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.067173 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:41Z","lastTransitionTime":"2025-12-04T09:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.074993 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.084163 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b5682ec3-78a9-47a8-a2bd-e4e58e4a5711\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b948fa6cd68a641df1c78251110c4e49e944be9affe87715c5b80be44f60ca00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e160007086212e3c54a49bdfcd7a43776b81c7561b18d963f0667181e2238c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ac30713fd3273b83ee133e02d9575ce884e1732adf4b612d134c208fd9cbf0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2a896af9cef44e3d2a4d67c779b311d9521fee07cc7f1d036627665d3de5e43\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.094854 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.108295 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3e11cde-e689-4b58-b238-08e945d8de0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c664560d34c3981bb7ced5309107a17a733e42262282d38ca0764d93123d5cb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c22276d13ebfbbafc80fa824ef60450a33005d088fad0c8d664902df312ea161\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c463648555fe01f6af10abbdacd35fb537c87f65203e16b51341f974216097cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28e7464585125e6c5f612888f1e60be5bea4104d7b3ac3ad3584a96cf8b12618\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a081c3c124ea87f88808e2e6a7d7a7c652702788948114a018ab0b253c40d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fc2ee628735ab99c5dc83f9534fed3ca98b97d4ad2e2283b2b41c3c9ac6a9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb7d058aeb3834d0a6b022ad1e3554334ea58e5124f4ae596446b25c1e5e2a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kfcrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bk2sb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.117755 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4wsl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f048d2e7-fb0a-4323-ab77-9ea37e5f3926\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0816345f70239811ccc1ed0a9d77e4aa2478257f68fa9f8fc533b2c4108a3936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sx52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:43Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4wsl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 
09:39:41.128866 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1efd049a-60ac-4370-8d36-37674547ede9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23c8233ce896e1491cfa203c29fbc2152ae41fac6239b109976ed3369a77aec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://45d7210f7b6d6fc2096c7f1d9cf3457adb3915080d8436c7e77c9b5b3a45979d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lz6tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nm9w6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.140435 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-txkn2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a8009fd-d652-44fb-8ef1-73078262e8fa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl4bm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-txkn2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.152588 4707 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddc88f48e40554c71636da16f045f897
e1f18b5b5c7d57d1a2c51d8ad2574276\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 09:38:33.330872 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 09:38:33.332404 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2831736728/tls.crt::/tmp/serving-cert-2831736728/tls.key\\\\\\\"\\\\nI1204 09:38:38.708176 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 09:38:38.709969 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 09:38:38.709987 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 09:38:38.710005 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 09:38:38.710010 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 09:38:38.718589 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 09:38:38.718643 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718649 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 09:38:38.718657 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 09:38:38.718662 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 09:38:38.719659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 09:38:38.719679 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 09:38:38.718713 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 09:38:38.718785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.166113 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92ab2ac7-f0c5-4a6d-9684-993992b4bab1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7af25c91e3c72c5529a413bedc19d1862e64b3bbe2d219a4b153a4260363f74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66fbc06fc5259ffef839a22b81974348c2eaa94ca400b2ae890266b61656bb75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e9ca7aea45a257042704e52ecbff3c0b229291c770e393a930864f21a5795c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.170624 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.170657 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.170666 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.170682 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.170692 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:41Z","lastTransitionTime":"2025-12-04T09:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.180385 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c044a9db98acf92992e7aca87809af03f42e5ea79ce969adba30ee3da59027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67607fda068a672cbf6eddd3e76e9de82cec1282060449ef7eac143a90064400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.225886 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b8aa67db2b3d293beb6cbdaae6565954f97bae8517a64e63a3fed471ae1437b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.240054 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2wjkm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e90cc2b8-9e61-4b1c-9344-7561316fa30a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be33a8659ab2ad2d3184e665389e597346f2fb24562965845435dbb5e9f5a6e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwr4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2wjkm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.252859 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e64897e0-4162-4aa8-9c13-8a4262a3ca3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79aef108629e896725c3b0d98ad111fa2bd8ce11262c09f00cedcca4bae6bed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cjwl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c244z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.268644 4707 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-npc85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9d3467a-1f4a-4d54-97b3-c7fd062eff13\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:28Z\\\",\\\"message\\\":\\\"2025-12-04T09:38:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000\\\\n2025-12-04T09:38:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1e152b2d-b9f8-4fb5-a632-b380fe49a000 to /host/opt/cni/bin/\\\\n2025-12-04T09:38:43Z [verbose] multus-daemon started\\\\n2025-12-04T09:38:43Z [verbose] Readiness Indicator file check\\\\n2025-12-04T09:39:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4r6rx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-npc85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.272855 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.272882 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.272891 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.272903 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.272912 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:41Z","lastTransitionTime":"2025-12-04T09:39:41Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.282741 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.302477 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8694dcf9-805f-405a-b998-41548943ef6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0c7504e06055d62fe18380c5a36d7cf78cffc91082eac729a0953de3a543568\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fac3361dbcb025f9141cd0ead45faa9ff69006064bcbcece103b8aa8ee248791\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b4249503157dc00334768a70e881e99471e1e5905cf365daa06fc3d1eeb707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b2ba37cd3624366b4cd54ebc3beddfef4e4baf
06d32af8e4f8d1ff800ff237a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4aaa75976315da808d50be6aef55ed04121e719ea44cb0d7479a996dd8a75e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5deb62f5103b5c9ab21ba12d79b80f0fa1476ad06dd5b57a2c719218e11df5ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56e73d9acb999d39e71b2e9d6a3c8437c203be2aae59b78ed196b5590e0351f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:22Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ab204eb6c57cd6d94a661eec9fc58febd58126376edbfc79090e3a03236e742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.315450 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.327348 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d082b5019ef85aa348c0fc8855bfdf1cc385eabe488e99c3387e2b82a804169\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.343881 4707 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T09:39:39Z\\\",\\\"message\\\":\\\"04 09:39:39.140028 6732 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140159 6732 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140261 6732 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140351 6732 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 09:39:39.140417 6732 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1204 09:39:39.140966 6732 factory.go:656] Stopping watch factory\\\\nI1204 09:39:39.140986 6732 ovnkube.go:599] Stopped ovnkube\\\\nI1204 09:39:39.141014 6732 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 09:39:39.141106 6732 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T09:39:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:38:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:38:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:38:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f472b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T09:38:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6nd57\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T09:39:41Z is after 2025-08-24T17:21:41Z" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.375217 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.375374 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.375476 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.375583 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.375665 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:41Z","lastTransitionTime":"2025-12-04T09:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.478172 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.478207 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.478216 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.478228 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.478236 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:41Z","lastTransitionTime":"2025-12-04T09:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.580737 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.580771 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.580783 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.580798 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.580809 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:41Z","lastTransitionTime":"2025-12-04T09:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.683472 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.683505 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.683515 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.683529 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.683541 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:41Z","lastTransitionTime":"2025-12-04T09:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.786235 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.786286 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.786294 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.786309 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.786319 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:41Z","lastTransitionTime":"2025-12-04T09:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.844260 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.844260 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:41 crc kubenswrapper[4707]: E1204 09:39:41.844615 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:41 crc kubenswrapper[4707]: E1204 09:39:41.844459 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.889678 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.889744 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.889767 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.889797 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.889819 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:41Z","lastTransitionTime":"2025-12-04T09:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.992798 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.992875 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.992897 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.992924 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:41 crc kubenswrapper[4707]: I1204 09:39:41.992955 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:41Z","lastTransitionTime":"2025-12-04T09:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.096311 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.096386 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.096397 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.096412 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.096421 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:42Z","lastTransitionTime":"2025-12-04T09:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.198370 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.198452 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.198463 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.198478 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.198490 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:42Z","lastTransitionTime":"2025-12-04T09:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.301230 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.301325 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.301380 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.301397 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.301409 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:42Z","lastTransitionTime":"2025-12-04T09:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.403392 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.403433 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.403444 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.403458 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.403466 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:42Z","lastTransitionTime":"2025-12-04T09:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.506869 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.506928 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.506946 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.506974 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.506993 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:42Z","lastTransitionTime":"2025-12-04T09:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.605206 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.605393 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.605504 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-04 09:40:46.605478096 +0000 UTC m=+146.041300633 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.605532 4707 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.605573 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.605631 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.60560679 +0000 UTC m=+146.041429457 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.605713 4707 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.605779 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.605765595 +0000 UTC m=+146.041588142 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.610617 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.610664 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.610674 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.610692 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.610702 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:42Z","lastTransitionTime":"2025-12-04T09:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.706650 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.706741 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.706878 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.706900 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.706913 4707 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.706964 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-04 09:40:46.706949187 +0000 UTC m=+146.142771694 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.706967 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.707050 4707 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.707073 4707 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.707154 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.707125262 +0000 UTC m=+146.142947809 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.714162 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.714233 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.714243 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.714260 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.714274 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:42Z","lastTransitionTime":"2025-12-04T09:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.815942 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.816275 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.816285 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.816299 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.816308 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:42Z","lastTransitionTime":"2025-12-04T09:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.844953 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.845227 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.845420 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:42 crc kubenswrapper[4707]: E1204 09:39:42.845536 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.919122 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.919173 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.919239 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.919736 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:42 crc kubenswrapper[4707]: I1204 09:39:42.919780 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:42Z","lastTransitionTime":"2025-12-04T09:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.022005 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.022061 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.022077 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.022148 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.022185 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:43Z","lastTransitionTime":"2025-12-04T09:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.125433 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.125488 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.125500 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.125519 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.125530 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:43Z","lastTransitionTime":"2025-12-04T09:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.228636 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.228723 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.228742 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.228772 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.228789 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:43Z","lastTransitionTime":"2025-12-04T09:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.331304 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.331407 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.331443 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.331472 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.331489 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:43Z","lastTransitionTime":"2025-12-04T09:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.434459 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.434499 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.434508 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.434521 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.434532 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:43Z","lastTransitionTime":"2025-12-04T09:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.537748 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.537806 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.537823 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.537851 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.537864 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:43Z","lastTransitionTime":"2025-12-04T09:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.640093 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.640138 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.640146 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.640159 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.640168 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:43Z","lastTransitionTime":"2025-12-04T09:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.742122 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.742198 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.742221 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.742249 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.742273 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:43Z","lastTransitionTime":"2025-12-04T09:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.844257 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.844396 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:43 crc kubenswrapper[4707]: E1204 09:39:43.844493 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.844541 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.844583 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:43 crc kubenswrapper[4707]: E1204 09:39:43.844581 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.844604 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.844628 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.844648 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:43Z","lastTransitionTime":"2025-12-04T09:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.947697 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.947788 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.947821 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.947855 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:43 crc kubenswrapper[4707]: I1204 09:39:43.947877 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:43Z","lastTransitionTime":"2025-12-04T09:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.051425 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.051488 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.051507 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.051535 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.051553 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:44Z","lastTransitionTime":"2025-12-04T09:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.154095 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.154142 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.154152 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.154168 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.154180 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:44Z","lastTransitionTime":"2025-12-04T09:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.256839 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.256890 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.256899 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.256913 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.256930 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:44Z","lastTransitionTime":"2025-12-04T09:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.359588 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.359639 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.359653 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.359667 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.359676 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:44Z","lastTransitionTime":"2025-12-04T09:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.462173 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.462236 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.462252 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.462269 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.462291 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:44Z","lastTransitionTime":"2025-12-04T09:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.565554 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.565596 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.565604 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.565618 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.565627 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:44Z","lastTransitionTime":"2025-12-04T09:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.576664 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.576708 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.576748 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.576767 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.576777 4707 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T09:39:44Z","lastTransitionTime":"2025-12-04T09:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.635092 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f"] Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.635601 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.638084 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.639079 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.639316 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.641298 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.678890 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=65.67887511 podStartE2EDuration="1m5.67887511s" podCreationTimestamp="2025-12-04 09:38:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:44.67630206 +0000 UTC m=+84.112124597" watchObservedRunningTime="2025-12-04 09:39:44.67887511 +0000 UTC m=+84.114697617" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.728521 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/442be505-fd50-4529-9be7-05e53ceeb7c9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.728562 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/442be505-fd50-4529-9be7-05e53ceeb7c9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.728584 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/442be505-fd50-4529-9be7-05e53ceeb7c9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.728610 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/442be505-fd50-4529-9be7-05e53ceeb7c9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 
09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.728688 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/442be505-fd50-4529-9be7-05e53ceeb7c9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.801244 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=35.801220133 podStartE2EDuration="35.801220133s" podCreationTimestamp="2025-12-04 09:39:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:44.788845726 +0000 UTC m=+84.224668233" watchObservedRunningTime="2025-12-04 09:39:44.801220133 +0000 UTC m=+84.237042640" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.818801 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-bk2sb" podStartSLOduration=63.818781581 podStartE2EDuration="1m3.818781581s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:44.81874624 +0000 UTC m=+84.254568747" watchObservedRunningTime="2025-12-04 09:39:44.818781581 +0000 UTC m=+84.254604088" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.829480 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/442be505-fd50-4529-9be7-05e53ceeb7c9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.829525 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/442be505-fd50-4529-9be7-05e53ceeb7c9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.829584 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/442be505-fd50-4529-9be7-05e53ceeb7c9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.829605 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/442be505-fd50-4529-9be7-05e53ceeb7c9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.829613 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: 
\"kubernetes.io/host-path/442be505-fd50-4529-9be7-05e53ceeb7c9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.829624 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/442be505-fd50-4529-9be7-05e53ceeb7c9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.830410 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/442be505-fd50-4529-9be7-05e53ceeb7c9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.831168 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/442be505-fd50-4529-9be7-05e53ceeb7c9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.835069 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/442be505-fd50-4529-9be7-05e53ceeb7c9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.846507 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.846535 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:44 crc kubenswrapper[4707]: E1204 09:39:44.846753 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:44 crc kubenswrapper[4707]: E1204 09:39:44.846842 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.847584 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nm9w6" podStartSLOduration=63.847573541 podStartE2EDuration="1m3.847573541s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:44.847185019 +0000 UTC m=+84.283007526" watchObservedRunningTime="2025-12-04 09:39:44.847573541 +0000 UTC m=+84.283396068" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.847933 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-s4wsl" podStartSLOduration=63.847925672 podStartE2EDuration="1m3.847925672s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:44.832403737 +0000 UTC m=+84.268226264" watchObservedRunningTime="2025-12-04 09:39:44.847925672 +0000 UTC m=+84.283748169" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.854101 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/442be505-fd50-4529-9be7-05e53ceeb7c9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9zq4f\" (UID: \"442be505-fd50-4529-9be7-05e53ceeb7c9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.859723 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.870372 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podStartSLOduration=63.870323382 podStartE2EDuration="1m3.870323382s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:44.869738484 +0000 UTC m=+84.305560991" watchObservedRunningTime="2025-12-04 09:39:44.870323382 +0000 UTC m=+84.306145889" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.883688 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-npc85" podStartSLOduration=63.883672259 podStartE2EDuration="1m3.883672259s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:44.882608486 +0000 UTC m=+84.318430993" watchObservedRunningTime="2025-12-04 09:39:44.883672259 +0000 UTC m=+84.319494756" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.897912 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=66.897898424 podStartE2EDuration="1m6.897898424s" podCreationTimestamp="2025-12-04 09:38:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:44.896993315 +0000 UTC m=+84.332815822" 
watchObservedRunningTime="2025-12-04 09:39:44.897898424 +0000 UTC m=+84.333720931" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.929758 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=66.929739628 podStartE2EDuration="1m6.929739628s" podCreationTimestamp="2025-12-04 09:38:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:44.914279305 +0000 UTC m=+84.350101812" watchObservedRunningTime="2025-12-04 09:39:44.929739628 +0000 UTC m=+84.365562135" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.952364 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" Dec 04 09:39:44 crc kubenswrapper[4707]: I1204 09:39:44.958794 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-2wjkm" podStartSLOduration=63.958777146 podStartE2EDuration="1m3.958777146s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:44.95795229 +0000 UTC m=+84.393774797" watchObservedRunningTime="2025-12-04 09:39:44.958777146 +0000 UTC m=+84.394599663" Dec 04 09:39:44 crc kubenswrapper[4707]: W1204 09:39:44.967401 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod442be505_fd50_4529_9be7_05e53ceeb7c9.slice/crio-3d348b7b1033f876ac52acc4f25e3b4b63e534fc554f5a4499fdd080d00f1e8e WatchSource:0}: Error finding container 3d348b7b1033f876ac52acc4f25e3b4b63e534fc554f5a4499fdd080d00f1e8e: Status 404 returned error can't find the container with id 3d348b7b1033f876ac52acc4f25e3b4b63e534fc554f5a4499fdd080d00f1e8e Dec 04 09:39:45 crc kubenswrapper[4707]: I1204 09:39:45.843928 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:45 crc kubenswrapper[4707]: I1204 09:39:45.844033 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:45 crc kubenswrapper[4707]: E1204 09:39:45.844755 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:45 crc kubenswrapper[4707]: E1204 09:39:45.844833 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:45 crc kubenswrapper[4707]: I1204 09:39:45.883063 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" event={"ID":"442be505-fd50-4529-9be7-05e53ceeb7c9","Type":"ContainerStarted","Data":"8cccf58e98fabba74c61454e7016fb5c597112786fd9fbbc0bb109d05d16aef1"} Dec 04 09:39:45 crc kubenswrapper[4707]: I1204 09:39:45.883120 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" event={"ID":"442be505-fd50-4529-9be7-05e53ceeb7c9","Type":"ContainerStarted","Data":"3d348b7b1033f876ac52acc4f25e3b4b63e534fc554f5a4499fdd080d00f1e8e"} Dec 04 09:39:45 crc kubenswrapper[4707]: I1204 09:39:45.895834 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=1.895816173 podStartE2EDuration="1.895816173s" podCreationTimestamp="2025-12-04 09:39:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:45.895306167 +0000 UTC m=+85.331128704" watchObservedRunningTime="2025-12-04 09:39:45.895816173 +0000 UTC m=+85.331638680" Dec 04 09:39:46 crc kubenswrapper[4707]: I1204 09:39:46.844610 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:46 crc kubenswrapper[4707]: E1204 09:39:46.844752 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:46 crc kubenswrapper[4707]: I1204 09:39:46.844631 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:46 crc kubenswrapper[4707]: E1204 09:39:46.844847 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:47 crc kubenswrapper[4707]: I1204 09:39:47.844438 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:47 crc kubenswrapper[4707]: I1204 09:39:47.844495 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:47 crc kubenswrapper[4707]: E1204 09:39:47.845157 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:47 crc kubenswrapper[4707]: E1204 09:39:47.845411 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:48 crc kubenswrapper[4707]: I1204 09:39:48.844555 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:48 crc kubenswrapper[4707]: I1204 09:39:48.844820 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:48 crc kubenswrapper[4707]: E1204 09:39:48.845016 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:48 crc kubenswrapper[4707]: E1204 09:39:48.845422 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:49 crc kubenswrapper[4707]: I1204 09:39:49.844835 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:49 crc kubenswrapper[4707]: I1204 09:39:49.844963 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:49 crc kubenswrapper[4707]: E1204 09:39:49.845017 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:49 crc kubenswrapper[4707]: E1204 09:39:49.845148 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:50 crc kubenswrapper[4707]: I1204 09:39:50.844106 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:50 crc kubenswrapper[4707]: I1204 09:39:50.844125 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:50 crc kubenswrapper[4707]: E1204 09:39:50.846647 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:50 crc kubenswrapper[4707]: E1204 09:39:50.847229 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:51 crc kubenswrapper[4707]: I1204 09:39:51.844536 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:51 crc kubenswrapper[4707]: I1204 09:39:51.844574 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:51 crc kubenswrapper[4707]: E1204 09:39:51.844709 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:51 crc kubenswrapper[4707]: E1204 09:39:51.844858 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:52 crc kubenswrapper[4707]: I1204 09:39:52.843899 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:52 crc kubenswrapper[4707]: I1204 09:39:52.844023 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:52 crc kubenswrapper[4707]: E1204 09:39:52.844149 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:52 crc kubenswrapper[4707]: E1204 09:39:52.844358 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:53 crc kubenswrapper[4707]: I1204 09:39:53.844633 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:53 crc kubenswrapper[4707]: E1204 09:39:53.844843 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:53 crc kubenswrapper[4707]: I1204 09:39:53.844632 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:53 crc kubenswrapper[4707]: E1204 09:39:53.845037 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:54 crc kubenswrapper[4707]: I1204 09:39:54.844555 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:54 crc kubenswrapper[4707]: I1204 09:39:54.844630 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:54 crc kubenswrapper[4707]: E1204 09:39:54.844823 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:54 crc kubenswrapper[4707]: E1204 09:39:54.845182 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:55 crc kubenswrapper[4707]: I1204 09:39:55.844703 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:55 crc kubenswrapper[4707]: I1204 09:39:55.844760 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:55 crc kubenswrapper[4707]: E1204 09:39:55.845196 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:55 crc kubenswrapper[4707]: E1204 09:39:55.845278 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:55 crc kubenswrapper[4707]: I1204 09:39:55.845748 4707 scope.go:117] "RemoveContainer" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" Dec 04 09:39:55 crc kubenswrapper[4707]: E1204 09:39:55.845947 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" Dec 04 09:39:56 crc kubenswrapper[4707]: I1204 09:39:56.844704 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:56 crc kubenswrapper[4707]: I1204 09:39:56.844704 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:56 crc kubenswrapper[4707]: E1204 09:39:56.844876 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:56 crc kubenswrapper[4707]: E1204 09:39:56.845193 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:57 crc kubenswrapper[4707]: I1204 09:39:57.844270 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:57 crc kubenswrapper[4707]: I1204 09:39:57.844305 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:57 crc kubenswrapper[4707]: E1204 09:39:57.844506 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:39:57 crc kubenswrapper[4707]: E1204 09:39:57.844684 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:58 crc kubenswrapper[4707]: I1204 09:39:58.844548 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:39:58 crc kubenswrapper[4707]: I1204 09:39:58.844578 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:58 crc kubenswrapper[4707]: E1204 09:39:58.844819 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:39:58 crc kubenswrapper[4707]: E1204 09:39:58.845066 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:39:59 crc kubenswrapper[4707]: I1204 09:39:59.401600 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:39:59 crc kubenswrapper[4707]: E1204 09:39:59.401818 4707 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:39:59 crc kubenswrapper[4707]: E1204 09:39:59.401906 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs podName:9a8009fd-d652-44fb-8ef1-73078262e8fa nodeName:}" failed. 
No retries permitted until 2025-12-04 09:41:03.4018833 +0000 UTC m=+162.837705807 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs") pod "network-metrics-daemon-txkn2" (UID: "9a8009fd-d652-44fb-8ef1-73078262e8fa") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 09:39:59 crc kubenswrapper[4707]: I1204 09:39:59.844911 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:39:59 crc kubenswrapper[4707]: I1204 09:39:59.844938 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:39:59 crc kubenswrapper[4707]: E1204 09:39:59.845055 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:39:59 crc kubenswrapper[4707]: E1204 09:39:59.845222 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:00 crc kubenswrapper[4707]: I1204 09:40:00.844915 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:00 crc kubenswrapper[4707]: I1204 09:40:00.844982 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:00 crc kubenswrapper[4707]: E1204 09:40:00.846922 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:00 crc kubenswrapper[4707]: E1204 09:40:00.847326 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:01 crc kubenswrapper[4707]: I1204 09:40:01.844397 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:01 crc kubenswrapper[4707]: I1204 09:40:01.844399 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:01 crc kubenswrapper[4707]: E1204 09:40:01.844936 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:01 crc kubenswrapper[4707]: E1204 09:40:01.845035 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:02 crc kubenswrapper[4707]: I1204 09:40:02.844546 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:02 crc kubenswrapper[4707]: I1204 09:40:02.844620 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:02 crc kubenswrapper[4707]: E1204 09:40:02.844690 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:02 crc kubenswrapper[4707]: E1204 09:40:02.844748 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:03 crc kubenswrapper[4707]: I1204 09:40:03.844735 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:03 crc kubenswrapper[4707]: I1204 09:40:03.844749 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:03 crc kubenswrapper[4707]: E1204 09:40:03.844973 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:03 crc kubenswrapper[4707]: E1204 09:40:03.845140 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:04 crc kubenswrapper[4707]: I1204 09:40:04.844756 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:04 crc kubenswrapper[4707]: I1204 09:40:04.844768 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:04 crc kubenswrapper[4707]: E1204 09:40:04.845071 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:04 crc kubenswrapper[4707]: E1204 09:40:04.845114 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:05 crc kubenswrapper[4707]: I1204 09:40:05.844103 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:05 crc kubenswrapper[4707]: I1204 09:40:05.844115 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:05 crc kubenswrapper[4707]: E1204 09:40:05.844289 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:05 crc kubenswrapper[4707]: E1204 09:40:05.844454 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:06 crc kubenswrapper[4707]: I1204 09:40:06.844797 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:06 crc kubenswrapper[4707]: I1204 09:40:06.844929 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:06 crc kubenswrapper[4707]: E1204 09:40:06.845035 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:06 crc kubenswrapper[4707]: E1204 09:40:06.845287 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:07 crc kubenswrapper[4707]: I1204 09:40:07.844580 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:07 crc kubenswrapper[4707]: I1204 09:40:07.844590 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:07 crc kubenswrapper[4707]: E1204 09:40:07.844744 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:07 crc kubenswrapper[4707]: E1204 09:40:07.844791 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:08 crc kubenswrapper[4707]: I1204 09:40:08.844265 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:08 crc kubenswrapper[4707]: E1204 09:40:08.844495 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:08 crc kubenswrapper[4707]: I1204 09:40:08.844729 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:08 crc kubenswrapper[4707]: E1204 09:40:08.844958 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:09 crc kubenswrapper[4707]: I1204 09:40:09.844354 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:09 crc kubenswrapper[4707]: I1204 09:40:09.844426 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:09 crc kubenswrapper[4707]: E1204 09:40:09.845061 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:09 crc kubenswrapper[4707]: E1204 09:40:09.845270 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:09 crc kubenswrapper[4707]: I1204 09:40:09.845632 4707 scope.go:117] "RemoveContainer" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" Dec 04 09:40:09 crc kubenswrapper[4707]: E1204 09:40:09.845865 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6nd57_openshift-ovn-kubernetes(5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" Dec 04 09:40:10 crc kubenswrapper[4707]: I1204 09:40:10.844461 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:10 crc kubenswrapper[4707]: I1204 09:40:10.844550 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:10 crc kubenswrapper[4707]: E1204 09:40:10.848021 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:10 crc kubenswrapper[4707]: E1204 09:40:10.848202 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:11 crc kubenswrapper[4707]: I1204 09:40:11.844612 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:11 crc kubenswrapper[4707]: I1204 09:40:11.844604 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:11 crc kubenswrapper[4707]: E1204 09:40:11.845032 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:11 crc kubenswrapper[4707]: E1204 09:40:11.845189 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:12 crc kubenswrapper[4707]: I1204 09:40:12.844657 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:12 crc kubenswrapper[4707]: E1204 09:40:12.844833 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:12 crc kubenswrapper[4707]: I1204 09:40:12.845662 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:12 crc kubenswrapper[4707]: E1204 09:40:12.845937 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:13 crc kubenswrapper[4707]: I1204 09:40:13.844208 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:13 crc kubenswrapper[4707]: I1204 09:40:13.844316 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:13 crc kubenswrapper[4707]: E1204 09:40:13.844359 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:13 crc kubenswrapper[4707]: E1204 09:40:13.844611 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:14 crc kubenswrapper[4707]: I1204 09:40:14.844159 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:14 crc kubenswrapper[4707]: E1204 09:40:14.844296 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:14 crc kubenswrapper[4707]: I1204 09:40:14.844382 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:14 crc kubenswrapper[4707]: E1204 09:40:14.844559 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:14 crc kubenswrapper[4707]: I1204 09:40:14.992376 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-npc85_e9d3467a-1f4a-4d54-97b3-c7fd062eff13/kube-multus/1.log" Dec 04 09:40:14 crc kubenswrapper[4707]: I1204 09:40:14.993532 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-npc85_e9d3467a-1f4a-4d54-97b3-c7fd062eff13/kube-multus/0.log" Dec 04 09:40:14 crc kubenswrapper[4707]: I1204 09:40:14.993608 4707 generic.go:334] "Generic (PLEG): container finished" podID="e9d3467a-1f4a-4d54-97b3-c7fd062eff13" containerID="861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95" exitCode=1 Dec 04 09:40:14 crc kubenswrapper[4707]: I1204 09:40:14.993658 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-npc85" event={"ID":"e9d3467a-1f4a-4d54-97b3-c7fd062eff13","Type":"ContainerDied","Data":"861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95"} Dec 04 09:40:14 crc kubenswrapper[4707]: I1204 09:40:14.993713 4707 scope.go:117] "RemoveContainer" containerID="04ed808a5f6300e1a3d052bf8c123ffbfeb62bfe5ff09e967e457afbbb61976b" Dec 04 09:40:14 crc kubenswrapper[4707]: I1204 09:40:14.994401 4707 scope.go:117] "RemoveContainer" containerID="861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95" Dec 04 09:40:14 crc kubenswrapper[4707]: E1204 09:40:14.994670 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-npc85_openshift-multus(e9d3467a-1f4a-4d54-97b3-c7fd062eff13)\"" pod="openshift-multus/multus-npc85" podUID="e9d3467a-1f4a-4d54-97b3-c7fd062eff13" Dec 04 09:40:15 crc kubenswrapper[4707]: I1204 09:40:15.018605 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9zq4f" podStartSLOduration=94.018584062 podStartE2EDuration="1m34.018584062s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:39:45.9130038 +0000 UTC m=+85.348826347" watchObservedRunningTime="2025-12-04 09:40:15.018584062 +0000 UTC m=+114.454406569" Dec 04 09:40:15 crc kubenswrapper[4707]: I1204 09:40:15.844228 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:15 crc kubenswrapper[4707]: I1204 09:40:15.844257 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:15 crc kubenswrapper[4707]: E1204 09:40:15.844496 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:15 crc kubenswrapper[4707]: E1204 09:40:15.844597 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:15 crc kubenswrapper[4707]: I1204 09:40:15.998711 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-npc85_e9d3467a-1f4a-4d54-97b3-c7fd062eff13/kube-multus/1.log" Dec 04 09:40:16 crc kubenswrapper[4707]: I1204 09:40:16.844787 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:16 crc kubenswrapper[4707]: I1204 09:40:16.844798 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:16 crc kubenswrapper[4707]: E1204 09:40:16.844947 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:16 crc kubenswrapper[4707]: E1204 09:40:16.845012 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:17 crc kubenswrapper[4707]: I1204 09:40:17.844188 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:17 crc kubenswrapper[4707]: I1204 09:40:17.844206 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:17 crc kubenswrapper[4707]: E1204 09:40:17.844319 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:17 crc kubenswrapper[4707]: E1204 09:40:17.844429 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:18 crc kubenswrapper[4707]: I1204 09:40:18.844206 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:18 crc kubenswrapper[4707]: E1204 09:40:18.844389 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:18 crc kubenswrapper[4707]: I1204 09:40:18.844543 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:18 crc kubenswrapper[4707]: E1204 09:40:18.844699 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:19 crc kubenswrapper[4707]: I1204 09:40:19.844924 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:19 crc kubenswrapper[4707]: I1204 09:40:19.844966 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:19 crc kubenswrapper[4707]: E1204 09:40:19.845147 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:19 crc kubenswrapper[4707]: E1204 09:40:19.845299 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:20 crc kubenswrapper[4707]: E1204 09:40:20.792972 4707 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 04 09:40:20 crc kubenswrapper[4707]: I1204 09:40:20.844125 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:20 crc kubenswrapper[4707]: I1204 09:40:20.844092 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:20 crc kubenswrapper[4707]: E1204 09:40:20.845663 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:20 crc kubenswrapper[4707]: E1204 09:40:20.845768 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:20 crc kubenswrapper[4707]: E1204 09:40:20.921565 4707 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 09:40:21 crc kubenswrapper[4707]: I1204 09:40:21.844388 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:21 crc kubenswrapper[4707]: I1204 09:40:21.844413 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:21 crc kubenswrapper[4707]: E1204 09:40:21.844831 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:21 crc kubenswrapper[4707]: E1204 09:40:21.844941 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:22 crc kubenswrapper[4707]: I1204 09:40:22.844107 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:22 crc kubenswrapper[4707]: E1204 09:40:22.844293 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:22 crc kubenswrapper[4707]: I1204 09:40:22.844326 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:22 crc kubenswrapper[4707]: E1204 09:40:22.844550 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:23 crc kubenswrapper[4707]: I1204 09:40:23.844396 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:23 crc kubenswrapper[4707]: I1204 09:40:23.844503 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:23 crc kubenswrapper[4707]: E1204 09:40:23.844533 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:23 crc kubenswrapper[4707]: E1204 09:40:23.845171 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:23 crc kubenswrapper[4707]: I1204 09:40:23.845762 4707 scope.go:117] "RemoveContainer" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" Dec 04 09:40:24 crc kubenswrapper[4707]: I1204 09:40:24.844744 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:24 crc kubenswrapper[4707]: I1204 09:40:24.844775 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:24 crc kubenswrapper[4707]: E1204 09:40:24.844895 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:24 crc kubenswrapper[4707]: E1204 09:40:24.845030 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:24 crc kubenswrapper[4707]: I1204 09:40:24.945938 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-txkn2"] Dec 04 09:40:25 crc kubenswrapper[4707]: I1204 09:40:25.029867 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/3.log" Dec 04 09:40:25 crc kubenswrapper[4707]: I1204 09:40:25.033301 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:25 crc kubenswrapper[4707]: E1204 09:40:25.033479 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:25 crc kubenswrapper[4707]: I1204 09:40:25.033650 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerStarted","Data":"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31"} Dec 04 09:40:25 crc kubenswrapper[4707]: I1204 09:40:25.034487 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:40:25 crc kubenswrapper[4707]: I1204 09:40:25.070068 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podStartSLOduration=104.07004322 podStartE2EDuration="1m44.07004322s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:25.068426959 +0000 UTC m=+124.504249496" watchObservedRunningTime="2025-12-04 09:40:25.07004322 +0000 UTC m=+124.505865747" Dec 04 09:40:25 crc kubenswrapper[4707]: I1204 09:40:25.844414 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:25 crc kubenswrapper[4707]: I1204 09:40:25.844451 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:25 crc kubenswrapper[4707]: E1204 09:40:25.844593 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:25 crc kubenswrapper[4707]: E1204 09:40:25.844812 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:25 crc kubenswrapper[4707]: E1204 09:40:25.923372 4707 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 09:40:26 crc kubenswrapper[4707]: I1204 09:40:26.844921 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:26 crc kubenswrapper[4707]: I1204 09:40:26.845055 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:26 crc kubenswrapper[4707]: E1204 09:40:26.845248 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:26 crc kubenswrapper[4707]: E1204 09:40:26.845531 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:26 crc kubenswrapper[4707]: I1204 09:40:26.846100 4707 scope.go:117] "RemoveContainer" containerID="861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95" Dec 04 09:40:27 crc kubenswrapper[4707]: I1204 09:40:27.040872 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-npc85_e9d3467a-1f4a-4d54-97b3-c7fd062eff13/kube-multus/1.log" Dec 04 09:40:27 crc kubenswrapper[4707]: I1204 09:40:27.041204 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-npc85" event={"ID":"e9d3467a-1f4a-4d54-97b3-c7fd062eff13","Type":"ContainerStarted","Data":"bb57638a447d7d048f32e79b4b13aa36ab3639fa6197fbb6f5bdb6ec80fdcb24"} Dec 04 09:40:27 crc kubenswrapper[4707]: I1204 09:40:27.844194 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:27 crc kubenswrapper[4707]: I1204 09:40:27.844224 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:27 crc kubenswrapper[4707]: E1204 09:40:27.844421 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:27 crc kubenswrapper[4707]: E1204 09:40:27.844497 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:28 crc kubenswrapper[4707]: I1204 09:40:28.845201 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:28 crc kubenswrapper[4707]: E1204 09:40:28.845390 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:28 crc kubenswrapper[4707]: I1204 09:40:28.845625 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:28 crc kubenswrapper[4707]: E1204 09:40:28.845891 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:29 crc kubenswrapper[4707]: I1204 09:40:29.843893 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:29 crc kubenswrapper[4707]: I1204 09:40:29.843946 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:29 crc kubenswrapper[4707]: E1204 09:40:29.844073 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 09:40:29 crc kubenswrapper[4707]: E1204 09:40:29.844169 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 09:40:30 crc kubenswrapper[4707]: I1204 09:40:30.843970 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:30 crc kubenswrapper[4707]: E1204 09:40:30.845191 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-txkn2" podUID="9a8009fd-d652-44fb-8ef1-73078262e8fa" Dec 04 09:40:30 crc kubenswrapper[4707]: I1204 09:40:30.845437 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:30 crc kubenswrapper[4707]: E1204 09:40:30.845508 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 09:40:31 crc kubenswrapper[4707]: I1204 09:40:31.844813 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:31 crc kubenswrapper[4707]: I1204 09:40:31.844893 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:31 crc kubenswrapper[4707]: I1204 09:40:31.847910 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 04 09:40:31 crc kubenswrapper[4707]: I1204 09:40:31.848035 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 04 09:40:31 crc kubenswrapper[4707]: I1204 09:40:31.848032 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 04 09:40:31 crc kubenswrapper[4707]: I1204 09:40:31.848238 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 04 09:40:32 crc kubenswrapper[4707]: I1204 09:40:32.844766 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:32 crc kubenswrapper[4707]: I1204 09:40:32.844849 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:40:32 crc kubenswrapper[4707]: I1204 09:40:32.847319 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 04 09:40:32 crc kubenswrapper[4707]: I1204 09:40:32.847431 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.533329 4707 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.592077 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.592878 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.592992 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v52ch"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.593450 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.594899 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-22vzf"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.595502 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.601839 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.602480 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.602720 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.603650 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.603710 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.603897 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.603956 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.604265 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.604475 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.604548 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.604741 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.604907 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.605264 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.605920 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.607270 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-ddxxg"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.607763 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.608822 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.612981 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.614215 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.618635 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.620656 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.621486 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.625580 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.627169 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.627636 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.628512 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.628529 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.634279 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.630045 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rttms"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.631278 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.632960 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.635126 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.635276 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.635543 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.635791 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.636124 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.636399 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.636498 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.636456 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.636781 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.636932 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.636694 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.636747 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.637246 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.637289 4707 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.637477 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.637880 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.638079 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.638494 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.638663 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.638808 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.638813 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.638946 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.639048 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.638142 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.639158 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.639246 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.639269 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.639575 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.640205 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.641549 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.641756 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.641957 4707 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.642076 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-g47v2"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.644015 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.644438 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-56ncv"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.663412 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.666899 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-854p6"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.672805 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.673195 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-g47v2" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.674110 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7th45"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.674121 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.674293 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.688421 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.689626 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.689908 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-22vzf"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.689977 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.690009 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.690097 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.690118 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.690401 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.690764 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.690964 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691034 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691068 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691120 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691183 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691198 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691249 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691285 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691324 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691504 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691613 4707 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691626 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.691984 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.692229 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.692870 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n74dl"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.693586 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.693621 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.693663 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.693824 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.693963 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.694071 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.694127 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.694345 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.694458 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.694728 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.694734 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.694833 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.695188 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.695412 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.695837 4707 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.697927 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.698481 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.702511 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.702726 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.702802 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.702967 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.703237 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.703294 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.703752 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.704292 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.705879 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.706293 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709488 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/035089c9-d1b0-465a-93eb-ec137a57d79c-auth-proxy-config\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709531 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-config\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709556 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/025e2d7e-ab23-4cf6-8d4c-b114ca8733b7-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-hcnc5\" (UID: \"025e2d7e-ab23-4cf6-8d4c-b114ca8733b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709574 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99bdb744-6ab3-42ac-9729-137102bdfe72-serving-cert\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709592 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5a08fa03-e041-425a-b5e8-05300cdac87b-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709610 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxsm5\" (UniqueName: \"kubernetes.io/projected/5a08fa03-e041-425a-b5e8-05300cdac87b-kube-api-access-sxsm5\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709626 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/aaf87e66-a7cb-4692-be61-67acc2b09236-encryption-config\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709643 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdspz\" (UniqueName: \"kubernetes.io/projected/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-kube-api-access-vdspz\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709662 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-serving-cert\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709678 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgl8h\" (UniqueName: \"kubernetes.io/projected/99bdb744-6ab3-42ac-9729-137102bdfe72-kube-api-access-pgl8h\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709695 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17825893-3f22-4973-a57c-6645ca6a2c31-config\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709709 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17825893-3f22-4973-a57c-6645ca6a2c31-service-ca-bundle\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709723 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-874sm\" (UniqueName: \"kubernetes.io/projected/17825893-3f22-4973-a57c-6645ca6a2c31-kube-api-access-874sm\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709938 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-client-ca\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709960 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/aaf87e66-a7cb-4692-be61-67acc2b09236-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-chj9t\" 
(UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.709975 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/035089c9-d1b0-465a-93eb-ec137a57d79c-config\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710001 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de0e91e1-5286-4464-823b-7e930e40e360-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dqzrz\" (UID: \"de0e91e1-5286-4464-823b-7e930e40e360\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710016 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17825893-3f22-4973-a57c-6645ca6a2c31-serving-cert\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710038 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aaf87e66-a7cb-4692-be61-67acc2b09236-audit-dir\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710052 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/035089c9-d1b0-465a-93eb-ec137a57d79c-machine-approver-tls\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710067 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-config\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710081 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48lhz\" (UniqueName: \"kubernetes.io/projected/035089c9-d1b0-465a-93eb-ec137a57d79c-kube-api-access-48lhz\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710100 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd2dn\" (UniqueName: \"kubernetes.io/projected/de0e91e1-5286-4464-823b-7e930e40e360-kube-api-access-sd2dn\") pod \"openshift-apiserver-operator-796bbdcf4f-dqzrz\" (UID: 
\"de0e91e1-5286-4464-823b-7e930e40e360\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710116 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-client-ca\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710130 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aaf87e66-a7cb-4692-be61-67acc2b09236-audit-policies\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710145 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17825893-3f22-4973-a57c-6645ca6a2c31-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710160 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmnd4\" (UniqueName: \"kubernetes.io/projected/025e2d7e-ab23-4cf6-8d4c-b114ca8733b7-kube-api-access-wmnd4\") pod \"cluster-samples-operator-665b6dd947-hcnc5\" (UID: \"025e2d7e-ab23-4cf6-8d4c-b114ca8733b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710175 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aaf87e66-a7cb-4692-be61-67acc2b09236-serving-cert\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710197 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a08fa03-e041-425a-b5e8-05300cdac87b-config\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710214 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5a08fa03-e041-425a-b5e8-05300cdac87b-images\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710228 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de0e91e1-5286-4464-823b-7e930e40e360-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dqzrz\" (UID: 
\"de0e91e1-5286-4464-823b-7e930e40e360\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710244 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwprh\" (UniqueName: \"kubernetes.io/projected/aaf87e66-a7cb-4692-be61-67acc2b09236-kube-api-access-qwprh\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710286 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/aaf87e66-a7cb-4692-be61-67acc2b09236-etcd-client\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710318 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.710360 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aaf87e66-a7cb-4692-be61-67acc2b09236-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.711434 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.711593 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.718370 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.717207 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.720138 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-8hd98"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.720657 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.721058 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.721314 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.726273 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.728304 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.730443 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.742956 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.745307 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.745935 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.746009 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.761188 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.762519 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.762656 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.762939 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rkq6j"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.763385 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.763433 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.764115 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.764381 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.764803 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.764912 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.765584 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.772439 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.772859 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.773105 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.773640 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.774211 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.774222 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.774921 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.775284 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9847h"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.775919 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.776564 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v52ch"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.778098 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.778875 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.779538 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-lrm44"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.780298 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.780949 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.781452 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.782009 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.783655 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.786487 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ncmcs"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.786604 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.787409 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.787512 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.788393 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.789114 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.789131 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.789328 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.790027 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.791638 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-6n9t7"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.792292 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gfbcs"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.792650 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.792832 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.794851 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.795400 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.795987 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.797208 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.799006 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.800975 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-g47v2"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.802244 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.809859 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-854p6"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.811456 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.814458 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-56ncv"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.815263 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.816007 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.817386 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.818739 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rttms"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.819198 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxsm5\" (UniqueName: \"kubernetes.io/projected/5a08fa03-e041-425a-b5e8-05300cdac87b-kube-api-access-sxsm5\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.819229 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-console-config\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.819250 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdspz\" (UniqueName: \"kubernetes.io/projected/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-kube-api-access-vdspz\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 
09:40:35.819267 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/aaf87e66-a7cb-4692-be61-67acc2b09236-encryption-config\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.819282 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-serving-cert\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.819297 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17825893-3f22-4973-a57c-6645ca6a2c31-service-ca-bundle\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.819318 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-874sm\" (UniqueName: \"kubernetes.io/projected/17825893-3f22-4973-a57c-6645ca6a2c31-kube-api-access-874sm\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.819381 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgl8h\" (UniqueName: \"kubernetes.io/projected/99bdb744-6ab3-42ac-9729-137102bdfe72-kube-api-access-pgl8h\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.819405 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/53f6fd9a-495b-4e68-9f43-6788ff997184-node-pullsecrets\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.819889 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-config\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.819976 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17825893-3f22-4973-a57c-6645ca6a2c31-config\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.820090 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/3863a76c-1217-480f-9a0b-f7f708af94fc-config\") pod \"kube-apiserver-operator-766d6c64bb-k6cdl\" (UID: \"3863a76c-1217-480f-9a0b-f7f708af94fc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.820173 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-client-ca\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.820868 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17825893-3f22-4973-a57c-6645ca6a2c31-service-ca-bundle\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.821063 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/aaf87e66-a7cb-4692-be61-67acc2b09236-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.821238 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/53f6fd9a-495b-4e68-9f43-6788ff997184-etcd-client\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.821313 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/035089c9-d1b0-465a-93eb-ec137a57d79c-config\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.821459 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/53f6fd9a-495b-4e68-9f43-6788ff997184-audit-dir\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.821568 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgj4s\" (UniqueName: \"kubernetes.io/projected/3b305e45-7c10-459e-b8ed-1192baa0b469-kube-api-access-rgj4s\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.821745 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " 
pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.821826 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17825893-3f22-4973-a57c-6645ca6a2c31-serving-cert\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.821906 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de0e91e1-5286-4464-823b-7e930e40e360-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dqzrz\" (UID: \"de0e91e1-5286-4464-823b-7e930e40e360\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.821988 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hqbl\" (UniqueName: \"kubernetes.io/projected/0436692e-40df-4130-8bd1-2059aeeeac11-kube-api-access-8hqbl\") pod \"downloads-7954f5f757-g47v2\" (UID: \"0436692e-40df-4130-8bd1-2059aeeeac11\") " pod="openshift-console/downloads-7954f5f757-g47v2" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.822060 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17825893-3f22-4973-a57c-6645ca6a2c31-config\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.822081 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aaf87e66-a7cb-4692-be61-67acc2b09236-audit-dir\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.822213 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-trusted-ca-bundle\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.822310 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aaf87e66-a7cb-4692-be61-67acc2b09236-audit-dir\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.821480 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/aaf87e66-a7cb-4692-be61-67acc2b09236-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.822460 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/035089c9-d1b0-465a-93eb-ec137a57d79c-config\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.822468 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-audit\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.822558 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-etcd-serving-ca\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.822727 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/035089c9-d1b0-465a-93eb-ec137a57d79c-machine-approver-tls\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.822730 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-client-ca\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.822811 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.822973 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-config\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.823098 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48lhz\" (UniqueName: \"kubernetes.io/projected/035089c9-d1b0-465a-93eb-ec137a57d79c-kube-api-access-48lhz\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.823202 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd2dn\" (UniqueName: \"kubernetes.io/projected/de0e91e1-5286-4464-823b-7e930e40e360-kube-api-access-sd2dn\") pod \"openshift-apiserver-operator-796bbdcf4f-dqzrz\" (UID: \"de0e91e1-5286-4464-823b-7e930e40e360\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.823279 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/de0e91e1-5286-4464-823b-7e930e40e360-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dqzrz\" (UID: \"de0e91e1-5286-4464-823b-7e930e40e360\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.823290 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53f6fd9a-495b-4e68-9f43-6788ff997184-serving-cert\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.823543 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-oauth-serving-cert\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.826011 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/aaf87e66-a7cb-4692-be61-67acc2b09236-encryption-config\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.826074 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-serving-cert\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.826118 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-client-ca\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.826262 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aaf87e66-a7cb-4692-be61-67acc2b09236-audit-policies\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.826362 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17825893-3f22-4973-a57c-6645ca6a2c31-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.826498 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-config\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.826457 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3b305e45-7c10-459e-b8ed-1192baa0b469-console-serving-cert\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.826602 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aaf87e66-a7cb-4692-be61-67acc2b09236-serving-cert\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.826851 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmnd4\" (UniqueName: \"kubernetes.io/projected/025e2d7e-ab23-4cf6-8d4c-b114ca8733b7-kube-api-access-wmnd4\") pod \"cluster-samples-operator-665b6dd947-hcnc5\" (UID: \"025e2d7e-ab23-4cf6-8d4c-b114ca8733b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.827894 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a08fa03-e041-425a-b5e8-05300cdac87b-config\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.827937 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-image-import-ca\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.827990 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5a08fa03-e041-425a-b5e8-05300cdac87b-images\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.828067 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de0e91e1-5286-4464-823b-7e930e40e360-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dqzrz\" (UID: \"de0e91e1-5286-4464-823b-7e930e40e360\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.828196 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/035089c9-d1b0-465a-93eb-ec137a57d79c-machine-approver-tls\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.828179 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-qwprh\" (UniqueName: \"kubernetes.io/projected/aaf87e66-a7cb-4692-be61-67acc2b09236-kube-api-access-qwprh\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.828251 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3863a76c-1217-480f-9a0b-f7f708af94fc-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-k6cdl\" (UID: \"3863a76c-1217-480f-9a0b-f7f708af94fc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.828369 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/aaf87e66-a7cb-4692-be61-67acc2b09236-etcd-client\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.829344 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-client-ca\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.829555 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17825893-3f22-4973-a57c-6645ca6a2c31-serving-cert\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.830019 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a08fa03-e041-425a-b5e8-05300cdac87b-config\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.830030 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aaf87e66-a7cb-4692-be61-67acc2b09236-audit-policies\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.830425 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpt7r\" (UniqueName: \"kubernetes.io/projected/53f6fd9a-495b-4e68-9f43-6788ff997184-kube-api-access-kpt7r\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.830436 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5a08fa03-e041-425a-b5e8-05300cdac87b-images\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.830458 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.830506 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aaf87e66-a7cb-4692-be61-67acc2b09236-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.833873 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3863a76c-1217-480f-9a0b-f7f708af94fc-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-k6cdl\" (UID: \"3863a76c-1217-480f-9a0b-f7f708af94fc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.833937 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/035089c9-d1b0-465a-93eb-ec137a57d79c-auth-proxy-config\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.833972 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-config\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.833996 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/025e2d7e-ab23-4cf6-8d4c-b114ca8733b7-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-hcnc5\" (UID: \"025e2d7e-ab23-4cf6-8d4c-b114ca8733b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.834024 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/53f6fd9a-495b-4e68-9f43-6788ff997184-encryption-config\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.834052 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3b305e45-7c10-459e-b8ed-1192baa0b469-console-oauth-config\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.834079 4707 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-service-ca\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.834103 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99bdb744-6ab3-42ac-9729-137102bdfe72-serving-cert\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.834128 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5a08fa03-e041-425a-b5e8-05300cdac87b-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.834230 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/aaf87e66-a7cb-4692-be61-67acc2b09236-etcd-client\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.834280 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de0e91e1-5286-4464-823b-7e930e40e360-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dqzrz\" (UID: \"de0e91e1-5286-4464-823b-7e930e40e360\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.834299 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.834314 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aaf87e66-a7cb-4692-be61-67acc2b09236-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.835063 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aaf87e66-a7cb-4692-be61-67acc2b09236-serving-cert\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.835372 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/035089c9-d1b0-465a-93eb-ec137a57d79c-auth-proxy-config\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.835456 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.836430 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-config\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.837048 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17825893-3f22-4973-a57c-6645ca6a2c31-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.837476 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5a08fa03-e041-425a-b5e8-05300cdac87b-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.839187 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99bdb744-6ab3-42ac-9729-137102bdfe72-serving-cert\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.839274 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-lrm44"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.840584 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-ddxxg"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.841558 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/025e2d7e-ab23-4cf6-8d4c-b114ca8733b7-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-hcnc5\" (UID: \"025e2d7e-ab23-4cf6-8d4c-b114ca8733b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.841779 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.843367 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7th45"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.844989 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n74dl"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.846585 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv"] 
Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.847772 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6n9t7"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.848953 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.850177 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rkq6j"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.851230 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.852579 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9847h"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.853708 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ncmcs"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.854912 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.855526 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.856648 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.857784 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.858954 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.860018 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.866069 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.866100 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-9xnqw"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.867071 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-z9vbh"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.868025 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.868125 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.868384 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9xnqw" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.868787 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9xnqw"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.870371 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gfbcs"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.872892 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-z9vbh"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.872917 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-g8flk"] Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.873485 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-g8flk" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.877725 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.895031 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.934629 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.934624 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/53f6fd9a-495b-4e68-9f43-6788ff997184-encryption-config\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.935065 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3b305e45-7c10-459e-b8ed-1192baa0b469-console-oauth-config\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.935160 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-service-ca\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.935356 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-console-config\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.936144 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-config\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 
09:40:35.936295 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/53f6fd9a-495b-4e68-9f43-6788ff997184-node-pullsecrets\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.936415 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3863a76c-1217-480f-9a0b-f7f708af94fc-config\") pod \"kube-apiserver-operator-766d6c64bb-k6cdl\" (UID: \"3863a76c-1217-480f-9a0b-f7f708af94fc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.936529 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/53f6fd9a-495b-4e68-9f43-6788ff997184-etcd-client\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.936661 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgj4s\" (UniqueName: \"kubernetes.io/projected/3b305e45-7c10-459e-b8ed-1192baa0b469-kube-api-access-rgj4s\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.936775 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/53f6fd9a-495b-4e68-9f43-6788ff997184-audit-dir\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.936854 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/53f6fd9a-495b-4e68-9f43-6788ff997184-audit-dir\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.936089 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-service-ca\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.936733 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-config\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.936440 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/53f6fd9a-495b-4e68-9f43-6788ff997184-node-pullsecrets\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.936929 4707 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3863a76c-1217-480f-9a0b-f7f708af94fc-config\") pod \"kube-apiserver-operator-766d6c64bb-k6cdl\" (UID: \"3863a76c-1217-480f-9a0b-f7f708af94fc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.936437 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-console-config\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.937184 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.937364 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hqbl\" (UniqueName: \"kubernetes.io/projected/0436692e-40df-4130-8bd1-2059aeeeac11-kube-api-access-8hqbl\") pod \"downloads-7954f5f757-g47v2\" (UID: \"0436692e-40df-4130-8bd1-2059aeeeac11\") " pod="openshift-console/downloads-7954f5f757-g47v2" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.937477 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-trusted-ca-bundle\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.937573 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-audit\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.937700 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-etcd-serving-ca\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.937825 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53f6fd9a-495b-4e68-9f43-6788ff997184-serving-cert\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.937927 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-oauth-serving-cert\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.938031 4707 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3b305e45-7c10-459e-b8ed-1192baa0b469-console-serving-cert\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.938161 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-image-import-ca\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.938361 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-audit\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.938296 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-etcd-serving-ca\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.937985 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.938700 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-oauth-serving-cert\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.938887 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3863a76c-1217-480f-9a0b-f7f708af94fc-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-k6cdl\" (UID: \"3863a76c-1217-480f-9a0b-f7f708af94fc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.938998 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpt7r\" (UniqueName: \"kubernetes.io/projected/53f6fd9a-495b-4e68-9f43-6788ff997184-kube-api-access-kpt7r\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.939151 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3863a76c-1217-480f-9a0b-f7f708af94fc-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-k6cdl\" (UID: \"3863a76c-1217-480f-9a0b-f7f708af94fc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" Dec 04 09:40:35 crc 
kubenswrapper[4707]: I1204 09:40:35.939253 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/53f6fd9a-495b-4e68-9f43-6788ff997184-etcd-client\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.938889 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b305e45-7c10-459e-b8ed-1192baa0b469-trusted-ca-bundle\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.939195 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/53f6fd9a-495b-4e68-9f43-6788ff997184-image-import-ca\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.939567 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3b305e45-7c10-459e-b8ed-1192baa0b469-console-oauth-config\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.940366 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53f6fd9a-495b-4e68-9f43-6788ff997184-serving-cert\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.941976 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3863a76c-1217-480f-9a0b-f7f708af94fc-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-k6cdl\" (UID: \"3863a76c-1217-480f-9a0b-f7f708af94fc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.942165 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/53f6fd9a-495b-4e68-9f43-6788ff997184-encryption-config\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.942580 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3b305e45-7c10-459e-b8ed-1192baa0b469-console-serving-cert\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.955723 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.975046 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 04 09:40:35 crc kubenswrapper[4707]: I1204 09:40:35.995395 4707 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.014651 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.035252 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.055954 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.075616 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.095517 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.114798 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.135141 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.155744 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.176252 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.196238 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.215961 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.236372 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.255496 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.275981 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.296099 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.316436 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.335452 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.356007 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 04 
09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.377062 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.396161 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.415164 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.435579 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.455701 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.475934 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.495702 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.516042 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.535531 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.555874 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.575476 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.596534 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.615874 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.636197 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.655663 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.674757 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.695708 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.716135 4707 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-operator-config" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.735710 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.756290 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.775114 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.794258 4707 request.go:700] Waited for 1.017976067s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-etcd-operator/secrets?fieldSelector=metadata.name%3Detcd-client&limit=500&resourceVersion=0 Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.796033 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.815805 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.835651 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.855651 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.883771 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.895600 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.915086 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.935462 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.954683 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.974970 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 04 09:40:36 crc kubenswrapper[4707]: I1204 09:40:36.995461 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.015294 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.035601 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.056196 4707 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.076042 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.095572 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.115920 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.136633 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.155661 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.186304 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.195537 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.216713 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.235820 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.256317 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.275449 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.295757 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.315793 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.336421 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.355856 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.376103 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.396759 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.416885 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.436612 4707 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.455916 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.502223 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxsm5\" (UniqueName: \"kubernetes.io/projected/5a08fa03-e041-425a-b5e8-05300cdac87b-kube-api-access-sxsm5\") pod \"machine-api-operator-5694c8668f-22vzf\" (UID: \"5a08fa03-e041-425a-b5e8-05300cdac87b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.512586 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdspz\" (UniqueName: \"kubernetes.io/projected/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-kube-api-access-vdspz\") pod \"route-controller-manager-6576b87f9c-6fb9c\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.516329 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.531002 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.535149 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-874sm\" (UniqueName: \"kubernetes.io/projected/17825893-3f22-4973-a57c-6645ca6a2c31-kube-api-access-874sm\") pod \"authentication-operator-69f744f599-ddxxg\" (UID: \"17825893-3f22-4973-a57c-6645ca6a2c31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.562558 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48lhz\" (UniqueName: \"kubernetes.io/projected/035089c9-d1b0-465a-93eb-ec137a57d79c-kube-api-access-48lhz\") pod \"machine-approver-56656f9798-vh6kq\" (UID: \"035089c9-d1b0-465a-93eb-ec137a57d79c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.569794 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd2dn\" (UniqueName: \"kubernetes.io/projected/de0e91e1-5286-4464-823b-7e930e40e360-kube-api-access-sd2dn\") pod \"openshift-apiserver-operator-796bbdcf4f-dqzrz\" (UID: \"de0e91e1-5286-4464-823b-7e930e40e360\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.578257 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.589080 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.590221 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgl8h\" (UniqueName: \"kubernetes.io/projected/99bdb744-6ab3-42ac-9729-137102bdfe72-kube-api-access-pgl8h\") pod \"controller-manager-879f6c89f-v52ch\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.612642 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmnd4\" (UniqueName: \"kubernetes.io/projected/025e2d7e-ab23-4cf6-8d4c-b114ca8733b7-kube-api-access-wmnd4\") pod \"cluster-samples-operator-665b6dd947-hcnc5\" (UID: \"025e2d7e-ab23-4cf6-8d4c-b114ca8733b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.630292 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwprh\" (UniqueName: \"kubernetes.io/projected/aaf87e66-a7cb-4692-be61-67acc2b09236-kube-api-access-qwprh\") pod \"apiserver-7bbb656c7d-chj9t\" (UID: \"aaf87e66-a7cb-4692-be61-67acc2b09236\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.655007 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.676899 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.695881 4707 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.716783 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.735446 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.752252 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-22vzf"] Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.757661 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.766549 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" Dec 04 09:40:37 crc kubenswrapper[4707]: W1204 09:40:37.767981 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a08fa03_e041_425a_b5e8_05300cdac87b.slice/crio-8203fbda058f6362a1cd5a01e02430ef71a43b0fddd60bb5f4af4b3672beba8f WatchSource:0}: Error finding container 8203fbda058f6362a1cd5a01e02430ef71a43b0fddd60bb5f4af4b3672beba8f: Status 404 returned error can't find the container with id 8203fbda058f6362a1cd5a01e02430ef71a43b0fddd60bb5f4af4b3672beba8f Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.775653 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.783294 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c"] Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.789794 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.794738 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.805622 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-ddxxg"] Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.813446 4707 request.go:700] Waited for 1.939725808s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-server-tls&limit=500&resourceVersion=0 Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.814979 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.836129 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.893927 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgj4s\" (UniqueName: \"kubernetes.io/projected/3b305e45-7c10-459e-b8ed-1192baa0b469-kube-api-access-rgj4s\") pod \"console-f9d7485db-56ncv\" (UID: \"3b305e45-7c10-459e-b8ed-1192baa0b469\") " pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.911977 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hqbl\" (UniqueName: \"kubernetes.io/projected/0436692e-40df-4130-8bd1-2059aeeeac11-kube-api-access-8hqbl\") pod \"downloads-7954f5f757-g47v2\" (UID: \"0436692e-40df-4130-8bd1-2059aeeeac11\") " pod="openshift-console/downloads-7954f5f757-g47v2" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.928681 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3863a76c-1217-480f-9a0b-f7f708af94fc-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-k6cdl\" (UID: \"3863a76c-1217-480f-9a0b-f7f708af94fc\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" Dec 04 09:40:37 crc kubenswrapper[4707]: I1204 09:40:37.949170 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpt7r\" (UniqueName: \"kubernetes.io/projected/53f6fd9a-495b-4e68-9f43-6788ff997184-kube-api-access-kpt7r\") pod \"apiserver-76f77b778f-7th45\" (UID: \"53f6fd9a-495b-4e68-9f43-6788ff997184\") " pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.076520 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" event={"ID":"5a08fa03-e041-425a-b5e8-05300cdac87b","Type":"ContainerStarted","Data":"8203fbda058f6362a1cd5a01e02430ef71a43b0fddd60bb5f4af4b3672beba8f"} Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.077535 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" event={"ID":"17825893-3f22-4973-a57c-6645ca6a2c31","Type":"ContainerStarted","Data":"8e27bb94aac4530be9317f1df8a4c1ffb3a55c7d4d1988055986cc81eebbb7b1"} Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.078649 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" event={"ID":"035089c9-d1b0-465a-93eb-ec137a57d79c","Type":"ContainerStarted","Data":"6400da7cb732153501b170af96db9d47a4b15c422bff7912ef289a1b96ec9151"} Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.079581 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" event={"ID":"0ebc6de2-21f5-41ae-800d-8ceb365c7b88","Type":"ContainerStarted","Data":"13478c26febb87d98a7d0031dc5d330ead6e59b047bc470b18fd072595d26f00"} Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.227036 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.227533 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.227813 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.228426 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-g47v2" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.229136 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.229236 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/44b581fc-38bf-4c33-820c-f27a4a730932-ca-trust-extracted\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.229305 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-registry-tls\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.229718 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.229930 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" Dec 04 09:40:38 crc kubenswrapper[4707]: E1204 09:40:38.231726 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:38.731696083 +0000 UTC m=+138.167518630 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.245843 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz"] Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.330457 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:38 crc kubenswrapper[4707]: E1204 09:40:38.330586 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:38.830568528 +0000 UTC m=+138.266391035 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.330614 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-registry-certificates\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.330647 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fjwg\" (UniqueName: \"kubernetes.io/projected/9186ac86-6e42-41af-a520-839e71f4c41c-kube-api-access-6fjwg\") pod \"openshift-controller-manager-operator-756b6f6bc6-9kn65\" (UID: \"9186ac86-6e42-41af-a520-839e71f4c41c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.330670 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7647af53-61ba-409d-90c4-25d6ee0a022d-stats-auth\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.330692 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7647af53-61ba-409d-90c4-25d6ee0a022d-metrics-certs\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.330723 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.331250 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-trusted-ca\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.331356 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/44b581fc-38bf-4c33-820c-f27a4a730932-installation-pull-secrets\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 
04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.331407 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.331511 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/44b581fc-38bf-4c33-820c-f27a4a730932-ca-trust-extracted\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.331919 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: \"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.331951 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: \"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.332393 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/44b581fc-38bf-4c33-820c-f27a4a730932-ca-trust-extracted\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.332643 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.332679 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lchq2\" (UniqueName: \"kubernetes.io/projected/7bce0373-35c7-420e-a6cf-7f5bc2d1bba6-kube-api-access-lchq2\") pod \"openshift-config-operator-7777fb866f-n4mnv\" (UID: \"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.332730 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-registry-tls\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.332750 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-567qw\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-kube-api-access-567qw\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.332775 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.333702 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.333941 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9186ac86-6e42-41af-a520-839e71f4c41c-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9kn65\" (UID: \"9186ac86-6e42-41af-a520-839e71f4c41c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.334538 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.334583 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91599765-e650-4b7d-9681-a509921b0f24-trusted-ca\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.334719 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.335008 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4flj\" (UniqueName: \"kubernetes.io/projected/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-kube-api-access-t4flj\") pod 
\"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: \"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.335718 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42qqb\" (UniqueName: \"kubernetes.io/projected/91599765-e650-4b7d-9681-a509921b0f24-kube-api-access-42qqb\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.336209 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-audit-policies\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.336456 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-bound-sa-token\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.336699 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5bb9\" (UniqueName: \"kubernetes.io/projected/7647af53-61ba-409d-90c4-25d6ee0a022d-kube-api-access-w5bb9\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.336884 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: \"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.336916 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p68fv\" (UniqueName: \"kubernetes.io/projected/916e5756-f645-44f7-b26d-706a87c57ed8-kube-api-access-p68fv\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.337016 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.337125 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: E1204 09:40:38.337838 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:38.837821994 +0000 UTC m=+138.273644501 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.338808 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.338878 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.338912 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/7bce0373-35c7-420e-a6cf-7f5bc2d1bba6-available-featuregates\") pod \"openshift-config-operator-7777fb866f-n4mnv\" (UID: \"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.339002 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/916e5756-f645-44f7-b26d-706a87c57ed8-audit-dir\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.339032 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9186ac86-6e42-41af-a520-839e71f4c41c-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-9kn65\" (UID: \"9186ac86-6e42-41af-a520-839e71f4c41c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.339063 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" 
(UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.339086 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7647af53-61ba-409d-90c4-25d6ee0a022d-service-ca-bundle\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.339756 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bce0373-35c7-420e-a6cf-7f5bc2d1bba6-serving-cert\") pod \"openshift-config-operator-7777fb866f-n4mnv\" (UID: \"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.340426 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/91599765-e650-4b7d-9681-a509921b0f24-serving-cert\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.340472 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7647af53-61ba-409d-90c4-25d6ee0a022d-default-certificate\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.341302 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91599765-e650-4b7d-9681-a509921b0f24-config\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.342588 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-registry-tls\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.412359 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v52ch"] Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.460320 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.460723 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/06984eba-18ca-42bf-bcd0-787f28f91d4b-srv-cert\") pod \"catalog-operator-68c6474976-d58h4\" (UID: \"06984eba-18ca-42bf-bcd0-787f28f91d4b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.460758 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqfqr\" (UniqueName: \"kubernetes.io/projected/af82926e-f43a-4d57-a5ad-edb6bef1a719-kube-api-access-wqfqr\") pod \"olm-operator-6b444d44fb-hkxr2\" (UID: \"af82926e-f43a-4d57-a5ad-edb6bef1a719\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.460791 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: \"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.460822 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ncmcs\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.460853 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c229t\" (UniqueName: \"kubernetes.io/projected/5ba20b9a-0c04-416f-963f-610d9be4cef1-kube-api-access-c229t\") pod \"kube-storage-version-migrator-operator-b67b599dd-82bkj\" (UID: \"5ba20b9a-0c04-416f-963f-610d9be4cef1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.460884 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.460909 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-etcd-client\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.460933 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzmjp\" (UniqueName: \"kubernetes.io/projected/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-kube-api-access-gzmjp\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.460958 4707 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/060fcdca-499b-4e84-81a2-144a175efe6f-tmpfs\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.460984 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ba20b9a-0c04-416f-963f-610d9be4cef1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-82bkj\" (UID: \"5ba20b9a-0c04-416f-963f-610d9be4cef1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461014 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9186ac86-6e42-41af-a520-839e71f4c41c-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-9kn65\" (UID: \"9186ac86-6e42-41af-a520-839e71f4c41c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461040 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jmdg\" (UniqueName: \"kubernetes.io/projected/8cedd2a3-4056-4c09-b1ab-a9596cff261b-kube-api-access-6jmdg\") pod \"dns-default-6n9t7\" (UID: \"8cedd2a3-4056-4c09-b1ab-a9596cff261b\") " pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461070 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/7528abe2-fb27-4c14-88c6-98fcbb716395-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dwmkj\" (UID: \"7528abe2-fb27-4c14-88c6-98fcbb716395\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461098 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/916e5756-f645-44f7-b26d-706a87c57ed8-audit-dir\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461128 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bce0373-35c7-420e-a6cf-7f5bc2d1bba6-serving-cert\") pod \"openshift-config-operator-7777fb866f-n4mnv\" (UID: \"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461153 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7647af53-61ba-409d-90c4-25d6ee0a022d-default-certificate\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461174 4707 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-etcd-ca\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461197 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91599765-e650-4b7d-9681-a509921b0f24-config\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461224 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xxn5\" (UniqueName: \"kubernetes.io/projected/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-kube-api-access-2xxn5\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461254 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rqs7\" (UniqueName: \"kubernetes.io/projected/e7f326c3-8717-44a8-af72-556a8fdbce22-kube-api-access-2rqs7\") pod \"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461294 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-secret-volume\") pod \"collect-profiles-29414010-ddr4k\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461327 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8cedd2a3-4056-4c09-b1ab-a9596cff261b-config-volume\") pod \"dns-default-6n9t7\" (UID: \"8cedd2a3-4056-4c09-b1ab-a9596cff261b\") " pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.461375 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6db5\" (UniqueName: \"kubernetes.io/projected/085c976a-20a2-410c-b37f-1fc34eb733da-kube-api-access-b6db5\") pod \"machine-config-controller-84d6567774-8vksb\" (UID: \"085c976a-20a2-410c-b37f-1fc34eb733da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" Dec 04 09:40:38 crc kubenswrapper[4707]: E1204 09:40:38.462060 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:38.962038615 +0000 UTC m=+138.397861132 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.463459 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/916e5756-f645-44f7-b26d-706a87c57ed8-audit-dir\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.463946 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd08c265-693b-4ca4-986d-45fa202caca7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5j2vc\" (UID: \"bd08c265-693b-4ca4-986d-45fa202caca7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.463989 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d-node-bootstrap-token\") pod \"machine-config-server-g8flk\" (UID: \"d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d\") " pod="openshift-machine-config-operator/machine-config-server-g8flk" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.464017 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ncmcs\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.464073 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/44b581fc-38bf-4c33-820c-f27a4a730932-installation-pull-secrets\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.464095 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91599765-e650-4b7d-9681-a509921b0f24-config\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.464105 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: \"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.464134 4707 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8e0c0140-161e-4110-999f-3c8c60d481cc-signing-key\") pod \"service-ca-9c57cc56f-gfbcs\" (UID: \"8e0c0140-161e-4110-999f-3c8c60d481cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.464167 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-serving-cert\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.464195 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lchq2\" (UniqueName: \"kubernetes.io/projected/7bce0373-35c7-420e-a6cf-7f5bc2d1bba6-kube-api-access-lchq2\") pod \"openshift-config-operator-7777fb866f-n4mnv\" (UID: \"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.465545 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d7614ad2-ca20-4d63-9a65-40a20bc74c8c-cert\") pod \"ingress-canary-9xnqw\" (UID: \"d7614ad2-ca20-4d63-9a65-40a20bc74c8c\") " pod="openshift-ingress-canary/ingress-canary-9xnqw" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.465895 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.466010 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/085c976a-20a2-410c-b37f-1fc34eb733da-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8vksb\" (UID: \"085c976a-20a2-410c-b37f-1fc34eb733da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.466088 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ba20b9a-0c04-416f-963f-610d9be4cef1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-82bkj\" (UID: \"5ba20b9a-0c04-416f-963f-610d9be4cef1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.466157 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0669320-7b9a-49e6-b24a-23e8ae1c4051-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-lrm44\" (UID: \"f0669320-7b9a-49e6-b24a-23e8ae1c4051\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.466246 4707 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/060fcdca-499b-4e84-81a2-144a175efe6f-webhook-cert\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.466344 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9186ac86-6e42-41af-a520-839e71f4c41c-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9kn65\" (UID: \"9186ac86-6e42-41af-a520-839e71f4c41c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.466459 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs24b\" (UniqueName: \"kubernetes.io/projected/822c86b6-e71b-471c-a0bc-1537af9e7c36-kube-api-access-bs24b\") pod \"migrator-59844c95c7-7287k\" (UID: \"822c86b6-e71b-471c-a0bc-1537af9e7c36\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.466555 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.467785 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.469090 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-config-volume\") pod \"collect-profiles-29414010-ddr4k\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.469175 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/67f753e3-95ec-46e4-bc29-efe016b6e3f7-metrics-tls\") pod \"dns-operator-744455d44c-rkq6j\" (UID: \"67f753e3-95ec-46e4-bc29-efe016b6e3f7\") " pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.469272 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7f326c3-8717-44a8-af72-556a8fdbce22-trusted-ca\") pod \"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.469424 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-42qqb\" (UniqueName: \"kubernetes.io/projected/91599765-e650-4b7d-9681-a509921b0f24-kube-api-access-42qqb\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.469532 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/232e4fba-ab9d-46de-9d0b-7311ddd1bcab-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vn2v9\" (UID: \"232e4fba-ab9d-46de-9d0b-7311ddd1bcab\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.469850 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27p27\" (UniqueName: \"kubernetes.io/projected/06984eba-18ca-42bf-bcd0-787f28f91d4b-kube-api-access-27p27\") pod \"catalog-operator-68c6474976-d58h4\" (UID: \"06984eba-18ca-42bf-bcd0-787f28f91d4b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.469945 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-auth-proxy-config\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.470015 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e9960ba-c1e3-4209-a7d3-ff88ccf73c39-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zd7sv\" (UID: \"9e9960ba-c1e3-4209-a7d3-ff88ccf73c39\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.470090 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-audit-policies\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.470173 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d-certs\") pod \"machine-config-server-g8flk\" (UID: \"d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d\") " pod="openshift-machine-config-operator/machine-config-server-g8flk" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.468906 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.468393 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.470740 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bce0373-35c7-420e-a6cf-7f5bc2d1bba6-serving-cert\") pod \"openshift-config-operator-7777fb866f-n4mnv\" (UID: \"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.468223 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: \"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.472059 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-audit-policies\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.467912 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9186ac86-6e42-41af-a520-839e71f4c41c-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9kn65\" (UID: \"9186ac86-6e42-41af-a520-839e71f4c41c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.473936 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p68fv\" (UniqueName: \"kubernetes.io/projected/916e5756-f645-44f7-b26d-706a87c57ed8-kube-api-access-p68fv\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474111 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78tf6\" (UniqueName: \"kubernetes.io/projected/f0669320-7b9a-49e6-b24a-23e8ae1c4051-kube-api-access-78tf6\") pod \"multus-admission-controller-857f4d67dd-lrm44\" (UID: \"f0669320-7b9a-49e6-b24a-23e8ae1c4051\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474287 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474421 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlxp4\" (UniqueName: 
\"kubernetes.io/projected/060fcdca-499b-4e84-81a2-144a175efe6f-kube-api-access-zlxp4\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474455 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/44b581fc-38bf-4c33-820c-f27a4a730932-installation-pull-secrets\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474475 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474532 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/7bce0373-35c7-420e-a6cf-7f5bc2d1bba6-available-featuregates\") pod \"openshift-config-operator-7777fb866f-n4mnv\" (UID: \"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474565 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7f326c3-8717-44a8-af72-556a8fdbce22-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474600 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474635 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/06984eba-18ca-42bf-bcd0-787f28f91d4b-profile-collector-cert\") pod \"catalog-operator-68c6474976-d58h4\" (UID: \"06984eba-18ca-42bf-bcd0-787f28f91d4b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474663 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/af82926e-f43a-4d57-a5ad-edb6bef1a719-srv-cert\") pod \"olm-operator-6b444d44fb-hkxr2\" (UID: \"af82926e-f43a-4d57-a5ad-edb6bef1a719\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474689 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/af82926e-f43a-4d57-a5ad-edb6bef1a719-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hkxr2\" (UID: \"af82926e-f43a-4d57-a5ad-edb6bef1a719\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474714 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474741 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7647af53-61ba-409d-90c4-25d6ee0a022d-service-ca-bundle\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474769 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5jkd\" (UniqueName: \"kubernetes.io/projected/f0411515-ac32-4ad1-a956-ce737c8d0d75-kube-api-access-x5jkd\") pod \"marketplace-operator-79b997595-ncmcs\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474799 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee5d7413-ce83-4601-9e30-13ad2d2d1768-config\") pod \"service-ca-operator-777779d784-b6bzf\" (UID: \"ee5d7413-ce83-4601-9e30-13ad2d2d1768\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474827 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/91599765-e650-4b7d-9681-a509921b0f24-serving-cert\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474858 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8e0c0140-161e-4110-999f-3c8c60d481cc-signing-cabundle\") pod \"service-ca-9c57cc56f-gfbcs\" (UID: \"8e0c0140-161e-4110-999f-3c8c60d481cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474878 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/7bce0373-35c7-420e-a6cf-7f5bc2d1bba6-available-featuregates\") pod \"openshift-config-operator-7777fb866f-n4mnv\" (UID: \"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474889 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-etcd-service-ca\") pod 
\"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474915 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd08c265-693b-4ca4-986d-45fa202caca7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5j2vc\" (UID: \"bd08c265-693b-4ca4-986d-45fa202caca7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474963 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e9960ba-c1e3-4209-a7d3-ff88ccf73c39-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zd7sv\" (UID: \"9e9960ba-c1e3-4209-a7d3-ff88ccf73c39\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.474991 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q85xn\" (UniqueName: \"kubernetes.io/projected/232e4fba-ab9d-46de-9d0b-7311ddd1bcab-kube-api-access-q85xn\") pod \"package-server-manager-789f6589d5-vn2v9\" (UID: \"232e4fba-ab9d-46de-9d0b-7311ddd1bcab\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475082 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee5d7413-ce83-4601-9e30-13ad2d2d1768-serving-cert\") pod \"service-ca-operator-777779d784-b6bzf\" (UID: \"ee5d7413-ce83-4601-9e30-13ad2d2d1768\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475156 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-registry-certificates\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475189 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-registration-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475223 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcdgz\" (UniqueName: \"kubernetes.io/projected/7528abe2-fb27-4c14-88c6-98fcbb716395-kube-api-access-xcdgz\") pod \"control-plane-machine-set-operator-78cbb6b69f-dwmkj\" (UID: \"7528abe2-fb27-4c14-88c6-98fcbb716395\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475253 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9e9960ba-c1e3-4209-a7d3-ff88ccf73c39-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zd7sv\" (UID: \"9e9960ba-c1e3-4209-a7d3-ff88ccf73c39\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475301 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fjwg\" (UniqueName: \"kubernetes.io/projected/9186ac86-6e42-41af-a520-839e71f4c41c-kube-api-access-6fjwg\") pod \"openshift-controller-manager-operator-756b6f6bc6-9kn65\" (UID: \"9186ac86-6e42-41af-a520-839e71f4c41c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475331 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfsqr\" (UniqueName: \"kubernetes.io/projected/ee5d7413-ce83-4601-9e30-13ad2d2d1768-kube-api-access-jfsqr\") pod \"service-ca-operator-777779d784-b6bzf\" (UID: \"ee5d7413-ce83-4601-9e30-13ad2d2d1768\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475371 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7647af53-61ba-409d-90c4-25d6ee0a022d-stats-auth\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475398 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7647af53-61ba-409d-90c4-25d6ee0a022d-metrics-certs\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475421 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt9t4\" (UniqueName: \"kubernetes.io/projected/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-kube-api-access-nt9t4\") pod \"collect-profiles-29414010-ddr4k\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475451 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475478 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-mountpoint-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475491 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475508 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/060fcdca-499b-4e84-81a2-144a175efe6f-apiservice-cert\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475567 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-trusted-ca\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475595 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475634 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-proxy-tls\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475720 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: \"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475752 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd08c265-693b-4ca4-986d-45fa202caca7-config\") pod \"kube-controller-manager-operator-78b949d7b-5j2vc\" (UID: \"bd08c265-693b-4ca4-986d-45fa202caca7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475818 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475848 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/8cedd2a3-4056-4c09-b1ab-a9596cff261b-metrics-tls\") pod \"dns-default-6n9t7\" (UID: \"8cedd2a3-4056-4c09-b1ab-a9596cff261b\") " pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475906 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpkfd\" (UniqueName: \"kubernetes.io/projected/d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d-kube-api-access-dpkfd\") pod \"machine-config-server-g8flk\" (UID: \"d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d\") " pod="openshift-machine-config-operator/machine-config-server-g8flk" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475946 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-567qw\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-kube-api-access-567qw\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.475982 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-images\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476015 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-csi-data-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476069 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476163 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk28l\" (UniqueName: \"kubernetes.io/projected/8e0c0140-161e-4110-999f-3c8c60d481cc-kube-api-access-bk28l\") pod \"service-ca-9c57cc56f-gfbcs\" (UID: \"8e0c0140-161e-4110-999f-3c8c60d481cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476256 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbh2s\" (UniqueName: \"kubernetes.io/projected/67f753e3-95ec-46e4-bc29-efe016b6e3f7-kube-api-access-qbh2s\") pod \"dns-operator-744455d44c-rkq6j\" (UID: \"67f753e3-95ec-46e4-bc29-efe016b6e3f7\") " pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476465 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-config\") pod \"etcd-operator-b45778765-9847h\" (UID: 
\"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476567 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91599765-e650-4b7d-9681-a509921b0f24-trusted-ca\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476642 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-plugins-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476719 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e7f326c3-8717-44a8-af72-556a8fdbce22-metrics-tls\") pod \"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476829 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-socket-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476914 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4flj\" (UniqueName: \"kubernetes.io/projected/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-kube-api-access-t4flj\") pod \"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: \"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476995 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgpxr\" (UniqueName: \"kubernetes.io/projected/2868c6d2-127b-41f0-8d2a-c602d45c339f-kube-api-access-pgpxr\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.477069 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/085c976a-20a2-410c-b37f-1fc34eb733da-proxy-tls\") pod \"machine-config-controller-84d6567774-8vksb\" (UID: \"085c976a-20a2-410c-b37f-1fc34eb733da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.477163 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqjvg\" (UniqueName: \"kubernetes.io/projected/d7614ad2-ca20-4d63-9a65-40a20bc74c8c-kube-api-access-kqjvg\") pod \"ingress-canary-9xnqw\" (UID: \"d7614ad2-ca20-4d63-9a65-40a20bc74c8c\") " pod="openshift-ingress-canary/ingress-canary-9xnqw" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 
09:40:38.477261 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-bound-sa-token\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.477381 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5bb9\" (UniqueName: \"kubernetes.io/projected/7647af53-61ba-409d-90c4-25d6ee0a022d-kube-api-access-w5bb9\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.476318 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7647af53-61ba-409d-90c4-25d6ee0a022d-service-ca-bundle\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: E1204 09:40:38.479000 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:38.978978158 +0000 UTC m=+138.414800665 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.480355 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-registry-certificates\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.480753 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91599765-e650-4b7d-9681-a509921b0f24-trusted-ca\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.480824 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-trusted-ca\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.483491 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: 
\"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.488058 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.488075 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9186ac86-6e42-41af-a520-839e71f4c41c-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-9kn65\" (UID: \"9186ac86-6e42-41af-a520-839e71f4c41c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.488962 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.492893 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7647af53-61ba-409d-90c4-25d6ee0a022d-default-certificate\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.493277 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.493471 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7647af53-61ba-409d-90c4-25d6ee0a022d-stats-auth\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.499740 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.500133 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7647af53-61ba-409d-90c4-25d6ee0a022d-metrics-certs\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.500147 4707 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.500459 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/91599765-e650-4b7d-9681-a509921b0f24-serving-cert\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.500472 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.501203 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.501687 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.515482 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lchq2\" (UniqueName: \"kubernetes.io/projected/7bce0373-35c7-420e-a6cf-7f5bc2d1bba6-kube-api-access-lchq2\") pod \"openshift-config-operator-7777fb866f-n4mnv\" (UID: \"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.535681 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42qqb\" (UniqueName: \"kubernetes.io/projected/91599765-e650-4b7d-9681-a509921b0f24-kube-api-access-42qqb\") pod \"console-operator-58897d9998-854p6\" (UID: \"91599765-e650-4b7d-9681-a509921b0f24\") " pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.563236 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p68fv\" (UniqueName: \"kubernetes.io/projected/916e5756-f645-44f7-b26d-706a87c57ed8-kube-api-access-p68fv\") pod \"oauth-openshift-558db77b4-rttms\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.579733 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.579839 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fjwg\" (UniqueName: \"kubernetes.io/projected/9186ac86-6e42-41af-a520-839e71f4c41c-kube-api-access-6fjwg\") pod \"openshift-controller-manager-operator-756b6f6bc6-9kn65\" (UID: \"9186ac86-6e42-41af-a520-839e71f4c41c\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.580556 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.580859 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ncmcs\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.580886 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd08c265-693b-4ca4-986d-45fa202caca7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5j2vc\" (UID: \"bd08c265-693b-4ca4-986d-45fa202caca7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.580906 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d-node-bootstrap-token\") pod \"machine-config-server-g8flk\" (UID: \"d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d\") " pod="openshift-machine-config-operator/machine-config-server-g8flk" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.580929 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8e0c0140-161e-4110-999f-3c8c60d481cc-signing-key\") pod \"service-ca-9c57cc56f-gfbcs\" (UID: \"8e0c0140-161e-4110-999f-3c8c60d481cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.580946 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-serving-cert\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.580963 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d7614ad2-ca20-4d63-9a65-40a20bc74c8c-cert\") pod \"ingress-canary-9xnqw\" (UID: \"d7614ad2-ca20-4d63-9a65-40a20bc74c8c\") " pod="openshift-ingress-canary/ingress-canary-9xnqw" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.580981 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/085c976a-20a2-410c-b37f-1fc34eb733da-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8vksb\" (UID: \"085c976a-20a2-410c-b37f-1fc34eb733da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581000 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ba20b9a-0c04-416f-963f-610d9be4cef1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-82bkj\" (UID: \"5ba20b9a-0c04-416f-963f-610d9be4cef1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581018 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0669320-7b9a-49e6-b24a-23e8ae1c4051-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-lrm44\" (UID: \"f0669320-7b9a-49e6-b24a-23e8ae1c4051\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581034 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs24b\" (UniqueName: \"kubernetes.io/projected/822c86b6-e71b-471c-a0bc-1537af9e7c36-kube-api-access-bs24b\") pod \"migrator-59844c95c7-7287k\" (UID: \"822c86b6-e71b-471c-a0bc-1537af9e7c36\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581051 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/060fcdca-499b-4e84-81a2-144a175efe6f-webhook-cert\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581071 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-config-volume\") pod \"collect-profiles-29414010-ddr4k\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581086 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/67f753e3-95ec-46e4-bc29-efe016b6e3f7-metrics-tls\") pod \"dns-operator-744455d44c-rkq6j\" (UID: \"67f753e3-95ec-46e4-bc29-efe016b6e3f7\") " pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581102 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7f326c3-8717-44a8-af72-556a8fdbce22-trusted-ca\") pod \"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581134 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/232e4fba-ab9d-46de-9d0b-7311ddd1bcab-package-server-manager-serving-cert\") 
pod \"package-server-manager-789f6589d5-vn2v9\" (UID: \"232e4fba-ab9d-46de-9d0b-7311ddd1bcab\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581153 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27p27\" (UniqueName: \"kubernetes.io/projected/06984eba-18ca-42bf-bcd0-787f28f91d4b-kube-api-access-27p27\") pod \"catalog-operator-68c6474976-d58h4\" (UID: \"06984eba-18ca-42bf-bcd0-787f28f91d4b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581170 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-auth-proxy-config\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581187 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e9960ba-c1e3-4209-a7d3-ff88ccf73c39-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zd7sv\" (UID: \"9e9960ba-c1e3-4209-a7d3-ff88ccf73c39\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581201 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d-certs\") pod \"machine-config-server-g8flk\" (UID: \"d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d\") " pod="openshift-machine-config-operator/machine-config-server-g8flk" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581223 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78tf6\" (UniqueName: \"kubernetes.io/projected/f0669320-7b9a-49e6-b24a-23e8ae1c4051-kube-api-access-78tf6\") pod \"multus-admission-controller-857f4d67dd-lrm44\" (UID: \"f0669320-7b9a-49e6-b24a-23e8ae1c4051\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581247 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlxp4\" (UniqueName: \"kubernetes.io/projected/060fcdca-499b-4e84-81a2-144a175efe6f-kube-api-access-zlxp4\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581263 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7f326c3-8717-44a8-af72-556a8fdbce22-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581279 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/af82926e-f43a-4d57-a5ad-edb6bef1a719-srv-cert\") pod \"olm-operator-6b444d44fb-hkxr2\" (UID: \"af82926e-f43a-4d57-a5ad-edb6bef1a719\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581294 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/06984eba-18ca-42bf-bcd0-787f28f91d4b-profile-collector-cert\") pod \"catalog-operator-68c6474976-d58h4\" (UID: \"06984eba-18ca-42bf-bcd0-787f28f91d4b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581311 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/af82926e-f43a-4d57-a5ad-edb6bef1a719-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hkxr2\" (UID: \"af82926e-f43a-4d57-a5ad-edb6bef1a719\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581327 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5jkd\" (UniqueName: \"kubernetes.io/projected/f0411515-ac32-4ad1-a956-ce737c8d0d75-kube-api-access-x5jkd\") pod \"marketplace-operator-79b997595-ncmcs\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581373 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee5d7413-ce83-4601-9e30-13ad2d2d1768-config\") pod \"service-ca-operator-777779d784-b6bzf\" (UID: \"ee5d7413-ce83-4601-9e30-13ad2d2d1768\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581390 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8e0c0140-161e-4110-999f-3c8c60d481cc-signing-cabundle\") pod \"service-ca-9c57cc56f-gfbcs\" (UID: \"8e0c0140-161e-4110-999f-3c8c60d481cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581406 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q85xn\" (UniqueName: \"kubernetes.io/projected/232e4fba-ab9d-46de-9d0b-7311ddd1bcab-kube-api-access-q85xn\") pod \"package-server-manager-789f6589d5-vn2v9\" (UID: \"232e4fba-ab9d-46de-9d0b-7311ddd1bcab\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581421 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-etcd-service-ca\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581436 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd08c265-693b-4ca4-986d-45fa202caca7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5j2vc\" (UID: \"bd08c265-693b-4ca4-986d-45fa202caca7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 
09:40:38.581450 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e9960ba-c1e3-4209-a7d3-ff88ccf73c39-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zd7sv\" (UID: \"9e9960ba-c1e3-4209-a7d3-ff88ccf73c39\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581475 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee5d7413-ce83-4601-9e30-13ad2d2d1768-serving-cert\") pod \"service-ca-operator-777779d784-b6bzf\" (UID: \"ee5d7413-ce83-4601-9e30-13ad2d2d1768\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581495 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-registration-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581512 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcdgz\" (UniqueName: \"kubernetes.io/projected/7528abe2-fb27-4c14-88c6-98fcbb716395-kube-api-access-xcdgz\") pod \"control-plane-machine-set-operator-78cbb6b69f-dwmkj\" (UID: \"7528abe2-fb27-4c14-88c6-98fcbb716395\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581529 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e9960ba-c1e3-4209-a7d3-ff88ccf73c39-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zd7sv\" (UID: \"9e9960ba-c1e3-4209-a7d3-ff88ccf73c39\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581547 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt9t4\" (UniqueName: \"kubernetes.io/projected/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-kube-api-access-nt9t4\") pod \"collect-profiles-29414010-ddr4k\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581563 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfsqr\" (UniqueName: \"kubernetes.io/projected/ee5d7413-ce83-4601-9e30-13ad2d2d1768-kube-api-access-jfsqr\") pod \"service-ca-operator-777779d784-b6bzf\" (UID: \"ee5d7413-ce83-4601-9e30-13ad2d2d1768\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581579 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-mountpoint-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581594 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/060fcdca-499b-4e84-81a2-144a175efe6f-apiservice-cert\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581610 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-proxy-tls\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581632 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd08c265-693b-4ca4-986d-45fa202caca7-config\") pod \"kube-controller-manager-operator-78b949d7b-5j2vc\" (UID: \"bd08c265-693b-4ca4-986d-45fa202caca7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581650 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8cedd2a3-4056-4c09-b1ab-a9596cff261b-metrics-tls\") pod \"dns-default-6n9t7\" (UID: \"8cedd2a3-4056-4c09-b1ab-a9596cff261b\") " pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:38 crc kubenswrapper[4707]: E1204 09:40:38.581692 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:39.081672216 +0000 UTC m=+138.517494923 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581740 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpkfd\" (UniqueName: \"kubernetes.io/projected/d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d-kube-api-access-dpkfd\") pod \"machine-config-server-g8flk\" (UID: \"d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d\") " pod="openshift-machine-config-operator/machine-config-server-g8flk" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581777 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-images\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581799 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-csi-data-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581825 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk28l\" (UniqueName: \"kubernetes.io/projected/8e0c0140-161e-4110-999f-3c8c60d481cc-kube-api-access-bk28l\") pod \"service-ca-9c57cc56f-gfbcs\" (UID: \"8e0c0140-161e-4110-999f-3c8c60d481cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581849 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbh2s\" (UniqueName: \"kubernetes.io/projected/67f753e3-95ec-46e4-bc29-efe016b6e3f7-kube-api-access-qbh2s\") pod \"dns-operator-744455d44c-rkq6j\" (UID: \"67f753e3-95ec-46e4-bc29-efe016b6e3f7\") " pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581889 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-config\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581916 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-plugins-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581939 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e7f326c3-8717-44a8-af72-556a8fdbce22-metrics-tls\") pod 
\"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.581995 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-socket-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.582021 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgpxr\" (UniqueName: \"kubernetes.io/projected/2868c6d2-127b-41f0-8d2a-c602d45c339f-kube-api-access-pgpxr\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583274 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/085c976a-20a2-410c-b37f-1fc34eb733da-proxy-tls\") pod \"machine-config-controller-84d6567774-8vksb\" (UID: \"085c976a-20a2-410c-b37f-1fc34eb733da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583303 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqjvg\" (UniqueName: \"kubernetes.io/projected/d7614ad2-ca20-4d63-9a65-40a20bc74c8c-kube-api-access-kqjvg\") pod \"ingress-canary-9xnqw\" (UID: \"d7614ad2-ca20-4d63-9a65-40a20bc74c8c\") " pod="openshift-ingress-canary/ingress-canary-9xnqw" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583382 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ncmcs\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583409 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/06984eba-18ca-42bf-bcd0-787f28f91d4b-srv-cert\") pod \"catalog-operator-68c6474976-d58h4\" (UID: \"06984eba-18ca-42bf-bcd0-787f28f91d4b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583438 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqfqr\" (UniqueName: \"kubernetes.io/projected/af82926e-f43a-4d57-a5ad-edb6bef1a719-kube-api-access-wqfqr\") pod \"olm-operator-6b444d44fb-hkxr2\" (UID: \"af82926e-f43a-4d57-a5ad-edb6bef1a719\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583463 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c229t\" (UniqueName: \"kubernetes.io/projected/5ba20b9a-0c04-416f-963f-610d9be4cef1-kube-api-access-c229t\") pod \"kube-storage-version-migrator-operator-b67b599dd-82bkj\" (UID: \"5ba20b9a-0c04-416f-963f-610d9be4cef1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" Dec 04 
09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583495 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzmjp\" (UniqueName: \"kubernetes.io/projected/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-kube-api-access-gzmjp\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583517 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/060fcdca-499b-4e84-81a2-144a175efe6f-tmpfs\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583539 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-etcd-client\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583647 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ba20b9a-0c04-416f-963f-610d9be4cef1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-82bkj\" (UID: \"5ba20b9a-0c04-416f-963f-610d9be4cef1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583675 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jmdg\" (UniqueName: \"kubernetes.io/projected/8cedd2a3-4056-4c09-b1ab-a9596cff261b-kube-api-access-6jmdg\") pod \"dns-default-6n9t7\" (UID: \"8cedd2a3-4056-4c09-b1ab-a9596cff261b\") " pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583701 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/7528abe2-fb27-4c14-88c6-98fcbb716395-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dwmkj\" (UID: \"7528abe2-fb27-4c14-88c6-98fcbb716395\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583730 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-etcd-ca\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583755 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xxn5\" (UniqueName: \"kubernetes.io/projected/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-kube-api-access-2xxn5\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583778 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"secret-volume\" (UniqueName: \"kubernetes.io/secret/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-secret-volume\") pod \"collect-profiles-29414010-ddr4k\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583798 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8cedd2a3-4056-4c09-b1ab-a9596cff261b-config-volume\") pod \"dns-default-6n9t7\" (UID: \"8cedd2a3-4056-4c09-b1ab-a9596cff261b\") " pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583819 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rqs7\" (UniqueName: \"kubernetes.io/projected/e7f326c3-8717-44a8-af72-556a8fdbce22-kube-api-access-2rqs7\") pod \"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.583845 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6db5\" (UniqueName: \"kubernetes.io/projected/085c976a-20a2-410c-b37f-1fc34eb733da-kube-api-access-b6db5\") pod \"machine-config-controller-84d6567774-8vksb\" (UID: \"085c976a-20a2-410c-b37f-1fc34eb733da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.586087 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-auth-proxy-config\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.587467 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8cedd2a3-4056-4c09-b1ab-a9596cff261b-metrics-tls\") pod \"dns-default-6n9t7\" (UID: \"8cedd2a3-4056-4c09-b1ab-a9596cff261b\") " pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.588007 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-etcd-service-ca\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.588066 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.582870 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-config-volume\") pod \"collect-profiles-29414010-ddr4k\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.588432 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ncmcs\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.588499 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e9960ba-c1e3-4209-a7d3-ff88ccf73c39-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zd7sv\" (UID: \"9e9960ba-c1e3-4209-a7d3-ff88ccf73c39\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.589667 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/67f753e3-95ec-46e4-bc29-efe016b6e3f7-metrics-tls\") pod \"dns-operator-744455d44c-rkq6j\" (UID: \"67f753e3-95ec-46e4-bc29-efe016b6e3f7\") " pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.590103 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/060fcdca-499b-4e84-81a2-144a175efe6f-tmpfs\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.590432 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ncmcs\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.591089 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-registration-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.591215 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-mountpoint-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.591760 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9e9960ba-c1e3-4209-a7d3-ff88ccf73c39-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zd7sv\" (UID: \"9e9960ba-c1e3-4209-a7d3-ff88ccf73c39\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.591847 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8e0c0140-161e-4110-999f-3c8c60d481cc-signing-cabundle\") pod \"service-ca-9c57cc56f-gfbcs\" (UID: \"8e0c0140-161e-4110-999f-3c8c60d481cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.592344 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8cedd2a3-4056-4c09-b1ab-a9596cff261b-config-volume\") pod \"dns-default-6n9t7\" (UID: \"8cedd2a3-4056-4c09-b1ab-a9596cff261b\") " pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.592381 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5bb9\" (UniqueName: \"kubernetes.io/projected/7647af53-61ba-409d-90c4-25d6ee0a022d-kube-api-access-w5bb9\") pod \"router-default-5444994796-8hd98\" (UID: \"7647af53-61ba-409d-90c4-25d6ee0a022d\") " pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.592496 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd08c265-693b-4ca4-986d-45fa202caca7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5j2vc\" (UID: \"bd08c265-693b-4ca4-986d-45fa202caca7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.592844 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ba20b9a-0c04-416f-963f-610d9be4cef1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-82bkj\" (UID: \"5ba20b9a-0c04-416f-963f-610d9be4cef1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.592907 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/085c976a-20a2-410c-b37f-1fc34eb733da-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8vksb\" (UID: \"085c976a-20a2-410c-b37f-1fc34eb733da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.593107 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd08c265-693b-4ca4-986d-45fa202caca7-config\") pod \"kube-controller-manager-operator-78b949d7b-5j2vc\" (UID: \"bd08c265-693b-4ca4-986d-45fa202caca7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.593221 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/06984eba-18ca-42bf-bcd0-787f28f91d4b-srv-cert\") pod \"catalog-operator-68c6474976-d58h4\" (UID: 
\"06984eba-18ca-42bf-bcd0-787f28f91d4b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.593375 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-csi-data-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.595345 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-proxy-tls\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.596109 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-secret-volume\") pod \"collect-profiles-29414010-ddr4k\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.596261 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-socket-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.596388 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ba20b9a-0c04-416f-963f-610d9be4cef1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-82bkj\" (UID: \"5ba20b9a-0c04-416f-963f-610d9be4cef1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.596422 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2868c6d2-127b-41f0-8d2a-c602d45c339f-plugins-dir\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.596596 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d-certs\") pod \"machine-config-server-g8flk\" (UID: \"d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d\") " pod="openshift-machine-config-operator/machine-config-server-g8flk" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.596778 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee5d7413-ce83-4601-9e30-13ad2d2d1768-config\") pod \"service-ca-operator-777779d784-b6bzf\" (UID: \"ee5d7413-ce83-4601-9e30-13ad2d2d1768\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.597138 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/06984eba-18ca-42bf-bcd0-787f28f91d4b-profile-collector-cert\") pod \"catalog-operator-68c6474976-d58h4\" (UID: \"06984eba-18ca-42bf-bcd0-787f28f91d4b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.598623 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/060fcdca-499b-4e84-81a2-144a175efe6f-webhook-cert\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.598679 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/af82926e-f43a-4d57-a5ad-edb6bef1a719-srv-cert\") pod \"olm-operator-6b444d44fb-hkxr2\" (UID: \"af82926e-f43a-4d57-a5ad-edb6bef1a719\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.599068 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/085c976a-20a2-410c-b37f-1fc34eb733da-proxy-tls\") pod \"machine-config-controller-84d6567774-8vksb\" (UID: \"085c976a-20a2-410c-b37f-1fc34eb733da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.599440 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/af82926e-f43a-4d57-a5ad-edb6bef1a719-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hkxr2\" (UID: \"af82926e-f43a-4d57-a5ad-edb6bef1a719\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.599859 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-images\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.600130 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e7f326c3-8717-44a8-af72-556a8fdbce22-metrics-tls\") pod \"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.600484 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7f326c3-8717-44a8-af72-556a8fdbce22-trusted-ca\") pod \"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.600584 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8e0c0140-161e-4110-999f-3c8c60d481cc-signing-key\") pod \"service-ca-9c57cc56f-gfbcs\" (UID: \"8e0c0140-161e-4110-999f-3c8c60d481cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.601098 4707 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0669320-7b9a-49e6-b24a-23e8ae1c4051-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-lrm44\" (UID: \"f0669320-7b9a-49e6-b24a-23e8ae1c4051\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.601450 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d-node-bootstrap-token\") pod \"machine-config-server-g8flk\" (UID: \"d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d\") " pod="openshift-machine-config-operator/machine-config-server-g8flk" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.602007 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/232e4fba-ab9d-46de-9d0b-7311ddd1bcab-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vn2v9\" (UID: \"232e4fba-ab9d-46de-9d0b-7311ddd1bcab\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.601779 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee5d7413-ce83-4601-9e30-13ad2d2d1768-serving-cert\") pod \"service-ca-operator-777779d784-b6bzf\" (UID: \"ee5d7413-ce83-4601-9e30-13ad2d2d1768\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.602871 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/060fcdca-499b-4e84-81a2-144a175efe6f-apiservice-cert\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.608198 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d7614ad2-ca20-4d63-9a65-40a20bc74c8c-cert\") pod \"ingress-canary-9xnqw\" (UID: \"d7614ad2-ca20-4d63-9a65-40a20bc74c8c\") " pod="openshift-ingress-canary/ingress-canary-9xnqw" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.611885 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: \"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.625910 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.628374 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4flj\" (UniqueName: \"kubernetes.io/projected/ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a-kube-api-access-t4flj\") pod \"cluster-image-registry-operator-dc59b4c8b-nfrdd\" (UID: \"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.650098 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-bound-sa-token\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.669841 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-567qw\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-kube-api-access-567qw\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.702577 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: E1204 09:40:38.702955 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:39.202939462 +0000 UTC m=+138.638761969 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.718625 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27p27\" (UniqueName: \"kubernetes.io/projected/06984eba-18ca-42bf-bcd0-787f28f91d4b-kube-api-access-27p27\") pod \"catalog-operator-68c6474976-d58h4\" (UID: \"06984eba-18ca-42bf-bcd0-787f28f91d4b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.728616 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6db5\" (UniqueName: \"kubernetes.io/projected/085c976a-20a2-410c-b37f-1fc34eb733da-kube-api-access-b6db5\") pod \"machine-config-controller-84d6567774-8vksb\" (UID: \"085c976a-20a2-410c-b37f-1fc34eb733da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.749320 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.767432 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd08c265-693b-4ca4-986d-45fa202caca7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5j2vc\" (UID: \"bd08c265-693b-4ca4-986d-45fa202caca7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.788782 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e9960ba-c1e3-4209-a7d3-ff88ccf73c39-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zd7sv\" (UID: \"9e9960ba-c1e3-4209-a7d3-ff88ccf73c39\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.804541 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:38 crc kubenswrapper[4707]: E1204 09:40:38.804751 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:39.30472198 +0000 UTC m=+138.740544487 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.805484 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:38 crc kubenswrapper[4707]: E1204 09:40:38.805781 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:39.305771605 +0000 UTC m=+138.741594282 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.807160 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.815915 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.830146 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-g47v2"] Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.832635 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5"] Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.834030 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7th45"] Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.852090 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jmdg\" (UniqueName: \"kubernetes.io/projected/8cedd2a3-4056-4c09-b1ab-a9596cff261b-kube-api-access-6jmdg\") pod \"dns-default-6n9t7\" (UID: \"8cedd2a3-4056-4c09-b1ab-a9596cff261b\") " pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.871974 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzmjp\" (UniqueName: \"kubernetes.io/projected/3bd5f0ac-77dd-4a9e-8343-6df2079e0355-kube-api-access-gzmjp\") pod \"machine-config-operator-74547568cd-lwb96\" (UID: \"3bd5f0ac-77dd-4a9e-8343-6df2079e0355\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.881992 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl"] Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.882629 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/7528abe2-fb27-4c14-88c6-98fcbb716395-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dwmkj\" (UID: \"7528abe2-fb27-4c14-88c6-98fcbb716395\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.882788 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-etcd-ca\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.882834 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-56ncv"] Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.884050 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-config\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.884313 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t"] Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.886008 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c229t\" (UniqueName: \"kubernetes.io/projected/5ba20b9a-0c04-416f-963f-610d9be4cef1-kube-api-access-c229t\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-82bkj\" (UID: \"5ba20b9a-0c04-416f-963f-610d9be4cef1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.886431 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xxn5\" (UniqueName: \"kubernetes.io/projected/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-kube-api-access-2xxn5\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.886739 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-etcd-client\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.887136 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q85xn\" (UniqueName: \"kubernetes.io/projected/232e4fba-ab9d-46de-9d0b-7311ddd1bcab-kube-api-access-q85xn\") pod \"package-server-manager-789f6589d5-vn2v9\" (UID: \"232e4fba-ab9d-46de-9d0b-7311ddd1bcab\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" Dec 04 09:40:38 crc kubenswrapper[4707]: W1204 09:40:38.890107 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0436692e_40df_4130_8bd1_2059aeeeac11.slice/crio-1bf359e6d03cc5d09d01810562709cf895c40f4ea14cb2500a212d3e424381e9 WatchSource:0}: Error finding container 1bf359e6d03cc5d09d01810562709cf895c40f4ea14cb2500a212d3e424381e9: Status 404 returned error can't find the container with id 1bf359e6d03cc5d09d01810562709cf895c40f4ea14cb2500a212d3e424381e9 Dec 04 09:40:38 crc kubenswrapper[4707]: W1204 09:40:38.894395 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod53f6fd9a_495b_4e68_9f43_6788ff997184.slice/crio-4e5bd54e32e06d2189560f03d04291a096141bdacd036e3cd92263d9e16e274e WatchSource:0}: Error finding container 4e5bd54e32e06d2189560f03d04291a096141bdacd036e3cd92263d9e16e274e: Status 404 returned error can't find the container with id 4e5bd54e32e06d2189560f03d04291a096141bdacd036e3cd92263d9e16e274e Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.894754 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5-serving-cert\") pod \"etcd-operator-b45778765-9847h\" (UID: \"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.898217 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt9t4\" (UniqueName: \"kubernetes.io/projected/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-kube-api-access-nt9t4\") pod \"collect-profiles-29414010-ddr4k\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.906231 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:38 crc kubenswrapper[4707]: E1204 09:40:38.906755 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:39.406736728 +0000 UTC m=+138.842559245 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.908652 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.911999 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcdgz\" (UniqueName: \"kubernetes.io/projected/7528abe2-fb27-4c14-88c6-98fcbb716395-kube-api-access-xcdgz\") pod \"control-plane-machine-set-operator-78cbb6b69f-dwmkj\" (UID: \"7528abe2-fb27-4c14-88c6-98fcbb716395\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.930840 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfsqr\" (UniqueName: \"kubernetes.io/projected/ee5d7413-ce83-4601-9e30-13ad2d2d1768-kube-api-access-jfsqr\") pod \"service-ca-operator-777779d784-b6bzf\" (UID: \"ee5d7413-ce83-4601-9e30-13ad2d2d1768\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.934953 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.952694 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rqs7\" (UniqueName: \"kubernetes.io/projected/e7f326c3-8717-44a8-af72-556a8fdbce22-kube-api-access-2rqs7\") pod \"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:38 crc kubenswrapper[4707]: W1204 09:40:38.965575 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7647af53_61ba_409d_90c4_25d6ee0a022d.slice/crio-f3b7bd60c7f882268727b8fb03e6cbcebaf6dfb5ce99e2e43141fa064484f683 WatchSource:0}: Error finding container f3b7bd60c7f882268727b8fb03e6cbcebaf6dfb5ce99e2e43141fa064484f683: Status 404 returned error can't find the container with id f3b7bd60c7f882268727b8fb03e6cbcebaf6dfb5ce99e2e43141fa064484f683 Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.967484 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.971937 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlxp4\" (UniqueName: \"kubernetes.io/projected/060fcdca-499b-4e84-81a2-144a175efe6f-kube-api-access-zlxp4\") pod \"packageserver-d55dfcdfc-hm9cc\" (UID: \"060fcdca-499b-4e84-81a2-144a175efe6f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.983967 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.991489 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.995495 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs24b\" (UniqueName: \"kubernetes.io/projected/822c86b6-e71b-471c-a0bc-1537af9e7c36-kube-api-access-bs24b\") pod \"migrator-59844c95c7-7287k\" (UID: \"822c86b6-e71b-471c-a0bc-1537af9e7c36\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k" Dec 04 09:40:38 crc kubenswrapper[4707]: I1204 09:40:38.999064 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.007675 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.008310 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.008715 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:39.508702032 +0000 UTC m=+138.944524549 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.017090 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.022790 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqfqr\" (UniqueName: \"kubernetes.io/projected/af82926e-f43a-4d57-a5ad-edb6bef1a719-kube-api-access-wqfqr\") pod \"olm-operator-6b444d44fb-hkxr2\" (UID: \"af82926e-f43a-4d57-a5ad-edb6bef1a719\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.038234 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78tf6\" (UniqueName: \"kubernetes.io/projected/f0669320-7b9a-49e6-b24a-23e8ae1c4051-kube-api-access-78tf6\") pod \"multus-admission-controller-857f4d67dd-lrm44\" (UID: \"f0669320-7b9a-49e6-b24a-23e8ae1c4051\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.043366 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.063532 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.070080 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.084518 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" event={"ID":"3863a76c-1217-480f-9a0b-f7f708af94fc","Type":"ContainerStarted","Data":"3c9a2bfb0ae98df9cf9e2da8aa2597ad3f0aad5833bd893dc5c75b170c9ec22d"} Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.084986 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbh2s\" (UniqueName: \"kubernetes.io/projected/67f753e3-95ec-46e4-bc29-efe016b6e3f7-kube-api-access-qbh2s\") pod \"dns-operator-744455d44c-rkq6j\" (UID: \"67f753e3-95ec-46e4-bc29-efe016b6e3f7\") " pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.090662 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.093892 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.096046 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7th45" event={"ID":"53f6fd9a-495b-4e68-9f43-6788ff997184","Type":"ContainerStarted","Data":"4e5bd54e32e06d2189560f03d04291a096141bdacd036e3cd92263d9e16e274e"} Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.108668 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.109467 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.109840 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:39.60982286 +0000 UTC m=+139.045645377 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.118509 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" event={"ID":"99bdb744-6ab3-42ac-9729-137102bdfe72","Type":"ContainerStarted","Data":"3826634ad4b08364807f9655d0343114323fa447437875623a2cc2fb85fcec14"} Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.130022 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7f326c3-8717-44a8-af72-556a8fdbce22-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rsq9z\" (UID: \"e7f326c3-8717-44a8-af72-556a8fdbce22\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.131705 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" event={"ID":"de0e91e1-5286-4464-823b-7e930e40e360","Type":"ContainerStarted","Data":"4d9ca528ba369d0b577e2b5351520c81326f235953ec50df32919f439a8af032"} Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.132723 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-56ncv" event={"ID":"3b305e45-7c10-459e-b8ed-1192baa0b469","Type":"ContainerStarted","Data":"b87987483ee628007f5924ae901cab6437ccb1ca05d7eabbbe48ab2221a2630c"} Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.134038 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-8hd98" event={"ID":"7647af53-61ba-409d-90c4-25d6ee0a022d","Type":"ContainerStarted","Data":"f3b7bd60c7f882268727b8fb03e6cbcebaf6dfb5ce99e2e43141fa064484f683"} Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.134861 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" event={"ID":"aaf87e66-a7cb-4692-be61-67acc2b09236","Type":"ContainerStarted","Data":"bb4fc072195c507e8cde5be398fc68da697f2188c4242965e6728fb41a670e07"} Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.135947 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-x5jkd\" (UniqueName: \"kubernetes.io/projected/f0411515-ac32-4ad1-a956-ce737c8d0d75-kube-api-access-x5jkd\") pod \"marketplace-operator-79b997595-ncmcs\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.136502 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-g47v2" event={"ID":"0436692e-40df-4130-8bd1-2059aeeeac11","Type":"ContainerStarted","Data":"1bf359e6d03cc5d09d01810562709cf895c40f4ea14cb2500a212d3e424381e9"} Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.155122 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqjvg\" (UniqueName: \"kubernetes.io/projected/d7614ad2-ca20-4d63-9a65-40a20bc74c8c-kube-api-access-kqjvg\") pod \"ingress-canary-9xnqw\" (UID: \"d7614ad2-ca20-4d63-9a65-40a20bc74c8c\") " pod="openshift-ingress-canary/ingress-canary-9xnqw" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.170052 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpkfd\" (UniqueName: \"kubernetes.io/projected/d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d-kube-api-access-dpkfd\") pod \"machine-config-server-g8flk\" (UID: \"d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d\") " pod="openshift-machine-config-operator/machine-config-server-g8flk" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.210949 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.211497 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:39.711477456 +0000 UTC m=+139.147300133 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.260925 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.277314 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.312574 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.313277 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:39.812619184 +0000 UTC m=+139.248441691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.313459 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.313777 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:39.813761361 +0000 UTC m=+139.249583868 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.326805 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.335191 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.356702 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.416484 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-04 09:40:39.91645814 +0000 UTC m=+139.352280657 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.418425 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.418919 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.419305 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:39.919287933 +0000 UTC m=+139.355110440 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.422008 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65"] Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.424236 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb"] Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.432213 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rttms"] Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.432449 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9xnqw" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.436848 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-g8flk" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.441665 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-854p6"] Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.510719 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv"] Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.514824 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd"] Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.520000 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.520230 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.020178053 +0000 UTC m=+139.456000580 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.520821 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.521167 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.021154954 +0000 UTC m=+139.456977461 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.558420 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgpxr\" (UniqueName: \"kubernetes.io/projected/2868c6d2-127b-41f0-8d2a-c602d45c339f-kube-api-access-pgpxr\") pod \"csi-hostpathplugin-z9vbh\" (UID: \"2868c6d2-127b-41f0-8d2a-c602d45c339f\") " pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.559851 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk28l\" (UniqueName: \"kubernetes.io/projected/8e0c0140-161e-4110-999f-3c8c60d481cc-kube-api-access-bk28l\") pod \"service-ca-9c57cc56f-gfbcs\" (UID: \"8e0c0140-161e-4110-999f-3c8c60d481cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" Dec 04 09:40:39 crc kubenswrapper[4707]: W1204 09:40:39.586181 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9186ac86_6e42_41af_a520_839e71f4c41c.slice/crio-f5c4fbe6a43b09964dd8cfda71404c8047a52cdab5dad820f83ae5184257e7db WatchSource:0}: Error finding container f5c4fbe6a43b09964dd8cfda71404c8047a52cdab5dad820f83ae5184257e7db: Status 404 returned error can't find the container with id f5c4fbe6a43b09964dd8cfda71404c8047a52cdab5dad820f83ae5184257e7db Dec 04 09:40:39 crc kubenswrapper[4707]: W1204 09:40:39.598837 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad61d5b3_e5d1_4fdb_bb1d_6e046c06818a.slice/crio-17d4c72bcb205a148fb5d5a4b6739e2b21e506dd75746aca466b2aebce8ac187 WatchSource:0}: Error finding container 17d4c72bcb205a148fb5d5a4b6739e2b21e506dd75746aca466b2aebce8ac187: Status 404 returned error can't find the container with id 17d4c72bcb205a148fb5d5a4b6739e2b21e506dd75746aca466b2aebce8ac187 Dec 04 09:40:39 crc kubenswrapper[4707]: W1204 09:40:39.601029 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7bce0373_35c7_420e_a6cf_7f5bc2d1bba6.slice/crio-4f9469f0cf78eae7da0ad432716e11a2f4c77f05975ecc93e6b17883ad9e1236 WatchSource:0}: Error finding container 4f9469f0cf78eae7da0ad432716e11a2f4c77f05975ecc93e6b17883ad9e1236: Status 404 returned error can't find the container with id 4f9469f0cf78eae7da0ad432716e11a2f4c77f05975ecc93e6b17883ad9e1236 Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.621929 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.622088 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-04 09:40:40.122054105 +0000 UTC m=+139.557876622 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.622171 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.623737 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.12371759 +0000 UTC m=+139.559540287 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.687124 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.744548 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.746461 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.746739 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.246717051 +0000 UTC m=+139.682539558 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.746859 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.747358 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.247327011 +0000 UTC m=+139.683149508 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.847778 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.848090 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.348076067 +0000 UTC m=+139.783898574 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.884210 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv"] Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.954803 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:39 crc kubenswrapper[4707]: E1204 09:40:39.955362 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.455349344 +0000 UTC m=+139.891171851 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:39 crc kubenswrapper[4707]: I1204 09:40:39.969148 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z"] Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.057024 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.058008 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.557972092 +0000 UTC m=+139.993794629 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.058311 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.058738 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.558727986 +0000 UTC m=+139.994550493 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.117782 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96"] Dec 04 09:40:40 crc kubenswrapper[4707]: W1204 09:40:40.159821 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd82fac8c_2d84_4a9b_a8ea_9ba9f3e3346d.slice/crio-82ce71ad15790f34fd47c6df8574402212e285f6a8b754edf96fabb175a7b8fa WatchSource:0}: Error finding container 82ce71ad15790f34fd47c6df8574402212e285f6a8b754edf96fabb175a7b8fa: Status 404 returned error can't find the container with id 82ce71ad15790f34fd47c6df8574402212e285f6a8b754edf96fabb175a7b8fa Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.160770 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.161120 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.661102495 +0000 UTC m=+140.096925002 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.217820 4707 generic.go:334] "Generic (PLEG): container finished" podID="aaf87e66-a7cb-4692-be61-67acc2b09236" containerID="765174ede53751bcd384ea6b26bfb8dee82cb943cd22fb226373c8b2beb8f9c9" exitCode=0 Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.217883 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" event={"ID":"aaf87e66-a7cb-4692-be61-67acc2b09236","Type":"ContainerDied","Data":"765174ede53751bcd384ea6b26bfb8dee82cb943cd22fb226373c8b2beb8f9c9"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.239717 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" event={"ID":"17825893-3f22-4973-a57c-6645ca6a2c31","Type":"ContainerStarted","Data":"029eff84405051a040fbaf412887052f0f00e3a198e1919dc398ac22e88df779"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.268900 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.269270 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.769253871 +0000 UTC m=+140.205076368 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.270922 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-ddxxg" podStartSLOduration=119.270909866 podStartE2EDuration="1m59.270909866s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:40.269536791 +0000 UTC m=+139.705359318" watchObservedRunningTime="2025-12-04 09:40:40.270909866 +0000 UTC m=+139.706732373" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.290992 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" event={"ID":"99bdb744-6ab3-42ac-9729-137102bdfe72","Type":"ContainerStarted","Data":"ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.291452 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.293524 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" event={"ID":"e7f326c3-8717-44a8-af72-556a8fdbce22","Type":"ContainerStarted","Data":"9c6a1cf6a73ad0d868a74bae70c79cf05db44117a85c00ee3d8aaf52f3cce412"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.299574 4707 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-v52ch container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.299814 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" podUID="99bdb744-6ab3-42ac-9729-137102bdfe72" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.301114 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" event={"ID":"3863a76c-1217-480f-9a0b-f7f708af94fc","Type":"ContainerStarted","Data":"2ee42f44cb4c17aa71d605c8f64f9e270e08c74a5eabb8026decb6a0c193cac8"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.309121 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" podStartSLOduration=119.309104201 podStartE2EDuration="1m59.309104201s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:40.306499486 +0000 UTC 
m=+139.742322003" watchObservedRunningTime="2025-12-04 09:40:40.309104201 +0000 UTC m=+139.744926708" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.322730 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-g47v2" event={"ID":"0436692e-40df-4130-8bd1-2059aeeeac11","Type":"ContainerStarted","Data":"f7d63d80e802b25fa86a1f37b0ce743135766e2797b3baf3138619922c739c96"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.323480 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-g47v2" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.329436 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" event={"ID":"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a","Type":"ContainerStarted","Data":"17d4c72bcb205a148fb5d5a4b6739e2b21e506dd75746aca466b2aebce8ac187"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.341582 4707 patch_prober.go:28] interesting pod/downloads-7954f5f757-g47v2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.341677 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-g47v2" podUID="0436692e-40df-4130-8bd1-2059aeeeac11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.369988 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.372143 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.872118196 +0000 UTC m=+140.307940863 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.398026 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-k6cdl" podStartSLOduration=119.39800148 podStartE2EDuration="1m59.39800148s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:40.380882692 +0000 UTC m=+139.816705209" watchObservedRunningTime="2025-12-04 09:40:40.39800148 +0000 UTC m=+139.833823987" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.405582 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" event={"ID":"025e2d7e-ab23-4cf6-8d4c-b114ca8733b7","Type":"ContainerStarted","Data":"891524b0b8b43962ac22f182b87c47b2b9c614da9f6b1a6becc543174b38c41b"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.413055 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-g47v2" podStartSLOduration=119.413028521 podStartE2EDuration="1m59.413028521s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:40.404242414 +0000 UTC m=+139.840064931" watchObservedRunningTime="2025-12-04 09:40:40.413028521 +0000 UTC m=+139.848851028" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.434323 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" podStartSLOduration=119.434300504 podStartE2EDuration="1m59.434300504s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:40.432804366 +0000 UTC m=+139.868626873" watchObservedRunningTime="2025-12-04 09:40:40.434300504 +0000 UTC m=+139.870123011" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.453704 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-56ncv" event={"ID":"3b305e45-7c10-459e-b8ed-1192baa0b469","Type":"ContainerStarted","Data":"3d7165e38387f82c712562aebbfaa5e880072d0a7b3285b8cb21cec3754aa90b"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.483383 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.489551 4707 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:40.989535725 +0000 UTC m=+140.425358232 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.497302 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-56ncv" podStartSLOduration=119.497271908 podStartE2EDuration="1m59.497271908s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:40.489427371 +0000 UTC m=+139.925249878" watchObservedRunningTime="2025-12-04 09:40:40.497271908 +0000 UTC m=+139.933094415" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.519510 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" event={"ID":"9186ac86-6e42-41af-a520-839e71f4c41c","Type":"ContainerStarted","Data":"f5c4fbe6a43b09964dd8cfda71404c8047a52cdab5dad820f83ae5184257e7db"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.538602 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" event={"ID":"916e5756-f645-44f7-b26d-706a87c57ed8","Type":"ContainerStarted","Data":"b2010ea8593f0fa02b55bceb4c95eb62bb92bfa77c3bde5348406c3879def7f7"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.550154 4707 generic.go:334] "Generic (PLEG): container finished" podID="53f6fd9a-495b-4e68-9f43-6788ff997184" containerID="787fcd31b5ad79c4ad8637949e13e44916a3c5b9e30430f08be63e493aa5b40f" exitCode=0 Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.550274 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7th45" event={"ID":"53f6fd9a-495b-4e68-9f43-6788ff997184","Type":"ContainerDied","Data":"787fcd31b5ad79c4ad8637949e13e44916a3c5b9e30430f08be63e493aa5b40f"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.565854 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" event={"ID":"de0e91e1-5286-4464-823b-7e930e40e360","Type":"ContainerStarted","Data":"d6a1f26c7545586955435e845e4afd4fcfc22554947e49d33fd53b4dc80eb1f5"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.598477 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.599661 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.099611685 +0000 UTC m=+140.535434192 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.600782 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.603109 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.103092999 +0000 UTC m=+140.538915506 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.607663 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" event={"ID":"085c976a-20a2-410c-b37f-1fc34eb733da","Type":"ContainerStarted","Data":"41172a5f4920c41a8f3e9217387c011d3b6346aadf204422500e2019ae114bf1"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.609931 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" event={"ID":"9e9960ba-c1e3-4209-a7d3-ff88ccf73c39","Type":"ContainerStarted","Data":"85ccf40139b0aab3b8002110d9ecbd9ed02df6349ea5f2810b112a92c9442a2c"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.613815 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" event={"ID":"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6","Type":"ContainerStarted","Data":"4f9469f0cf78eae7da0ad432716e11a2f4c77f05975ecc93e6b17883ad9e1236"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.617826 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dqzrz" podStartSLOduration=119.617811449 podStartE2EDuration="1m59.617811449s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:40.614689457 +0000 UTC m=+140.050511964" watchObservedRunningTime="2025-12-04 09:40:40.617811449 +0000 UTC 
m=+140.053633956" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.634744 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" event={"ID":"0ebc6de2-21f5-41ae-800d-8ceb365c7b88","Type":"ContainerStarted","Data":"c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.635704 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.674659 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" event={"ID":"5a08fa03-e041-425a-b5e8-05300cdac87b","Type":"ContainerStarted","Data":"210394fcfa37dd33c0753bc7720ed7aef0054a3132bf0b2f8bc9452e1b9ee7d9"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.695582 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-854p6" event={"ID":"91599765-e650-4b7d-9681-a509921b0f24","Type":"ContainerStarted","Data":"797e2dd049c7cccdec9b8f2c0cab7b199de396800723bd29b642f1c8d25f6065"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.701998 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.702241 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.202221911 +0000 UTC m=+140.638044418 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.702363 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.703890 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.203879196 +0000 UTC m=+140.639701703 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.725742 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" podStartSLOduration=119.725725578 podStartE2EDuration="1m59.725725578s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:40.724718615 +0000 UTC m=+140.160541122" watchObservedRunningTime="2025-12-04 09:40:40.725725578 +0000 UTC m=+140.161548085" Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.763243 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" event={"ID":"035089c9-d1b0-465a-93eb-ec137a57d79c","Type":"ContainerStarted","Data":"005c854ed5759066afcbf4194174524dde968f0b68b166b2443ffea4f97d2d01"} Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.804439 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.804575 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.304558389 +0000 UTC m=+140.740380896 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.804911 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.805836 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.3058294 +0000 UTC m=+140.741651907 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.907701 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.907960 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.40793253 +0000 UTC m=+140.843755037 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:40 crc kubenswrapper[4707]: I1204 09:40:40.908460 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:40 crc kubenswrapper[4707]: E1204 09:40:40.909099 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.409088878 +0000 UTC m=+140.844911385 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.012794 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:41 crc kubenswrapper[4707]: E1204 09:40:41.013230 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.513211543 +0000 UTC m=+140.949034050 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.013481 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-lrm44"] Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.030070 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6n9t7"] Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.114893 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:41 crc kubenswrapper[4707]: E1204 09:40:41.115575 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.615555961 +0000 UTC m=+141.051378468 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.216080 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:41 crc kubenswrapper[4707]: E1204 09:40:41.216588 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.716570255 +0000 UTC m=+141.152392762 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.224309 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj"] Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.265709 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.318923 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:41 crc kubenswrapper[4707]: E1204 09:40:41.319377 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.819327786 +0000 UTC m=+141.255150293 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.420417 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:41 crc kubenswrapper[4707]: E1204 09:40:41.421119 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:41.921099156 +0000 UTC m=+141.356921663 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.524849 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:41 crc kubenswrapper[4707]: E1204 09:40:41.525466 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:42.025449448 +0000 UTC m=+141.461271975 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.580570 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9"] Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.588891 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9847h"] Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.627257 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:41 crc kubenswrapper[4707]: E1204 09:40:41.627618 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:42.12760331 +0000 UTC m=+141.563425817 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.699186 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf"] Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.729932 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:41 crc kubenswrapper[4707]: E1204 09:40:41.730246 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:42.230235876 +0000 UTC m=+141.666058383 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.773900 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rkq6j"] Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.797010 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" event={"ID":"5a08fa03-e041-425a-b5e8-05300cdac87b","Type":"ContainerStarted","Data":"cc8ef3604288f143fafc9bb4738b286bf47a10f5f2dfe9cd2ba4fc585542d5eb"} Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.805209 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9xnqw"] Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.833912 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-22vzf" podStartSLOduration=120.833892397 podStartE2EDuration="2m0.833892397s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:41.832039607 +0000 UTC m=+141.267862124" watchObservedRunningTime="2025-12-04 09:40:41.833892397 +0000 UTC m=+141.269714904" Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.834199 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:41 crc kubenswrapper[4707]: E1204 09:40:41.834528 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:42.334511237 +0000 UTC m=+141.770333744 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.834711 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:41 crc kubenswrapper[4707]: E1204 09:40:41.835896 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:42.335887003 +0000 UTC m=+141.771709510 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.855265 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-z9vbh"] Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.860650 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" event={"ID":"085c976a-20a2-410c-b37f-1fc34eb733da","Type":"ContainerStarted","Data":"cfe6a00bc1a30e900fa01df458f0d9eed7acd01a256c015bb5e7ab153b2ac949"} Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.873573 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" event={"ID":"f0669320-7b9a-49e6-b24a-23e8ae1c4051","Type":"ContainerStarted","Data":"eb52db37454365ed70707bedfef2b15500fcb07bc2fa3424bec92736c07e58d5"} Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.926411 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc"] Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.939772 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:41 crc kubenswrapper[4707]: E1204 09:40:41.940924 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-04 09:40:42.440908627 +0000 UTC m=+141.876731134 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.963322 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-854p6" event={"ID":"91599765-e650-4b7d-9681-a509921b0f24","Type":"ContainerStarted","Data":"eea441abff8acacf50266c437b031f06bb7a2a6cb9b5bf5ea5fc2bb642f5fd13"} Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.965417 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:41 crc kubenswrapper[4707]: I1204 09:40:41.982929 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k"] Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.008456 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6n9t7" event={"ID":"8cedd2a3-4056-4c09-b1ab-a9596cff261b","Type":"ContainerStarted","Data":"1e449c7df485a44fd68b5219d9f57356d7b9a7c434c564ae09341225ff8969dd"} Dec 04 09:40:42 crc kubenswrapper[4707]: W1204 09:40:42.031847 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7614ad2_ca20_4d63_9a65_40a20bc74c8c.slice/crio-312ef5ca6d3e47fc65997ea5f730e0a172c5af042399614fc51b4b96f4270c5c WatchSource:0}: Error finding container 312ef5ca6d3e47fc65997ea5f730e0a172c5af042399614fc51b4b96f4270c5c: Status 404 returned error can't find the container with id 312ef5ca6d3e47fc65997ea5f730e0a172c5af042399614fc51b4b96f4270c5c Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.034986 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj"] Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.051273 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" event={"ID":"232e4fba-ab9d-46de-9d0b-7311ddd1bcab","Type":"ContainerStarted","Data":"20f690254150ae184217ebb677fa37ffbb0eef38f1e9ed00692a01989be7eeb6"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.052856 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:42 crc kubenswrapper[4707]: E1204 09:40:42.054483 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:42.55446411 +0000 UTC m=+141.990286807 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.055631 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-854p6" podStartSLOduration=121.055613098 podStartE2EDuration="2m1.055613098s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:42.051864755 +0000 UTC m=+141.487687272" watchObservedRunningTime="2025-12-04 09:40:42.055613098 +0000 UTC m=+141.491435605" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.068548 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gfbcs"] Dec 04 09:40:42 crc kubenswrapper[4707]: W1204 09:40:42.089899 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2868c6d2_127b_41f0_8d2a_c602d45c339f.slice/crio-8798ea19653be18caf5fda12e367cd04c84b624436180af03b956562651f2f99 WatchSource:0}: Error finding container 8798ea19653be18caf5fda12e367cd04c84b624436180af03b956562651f2f99: Status 404 returned error can't find the container with id 8798ea19653be18caf5fda12e367cd04c84b624436180af03b956562651f2f99 Dec 04 09:40:42 crc kubenswrapper[4707]: W1204 09:40:42.091780 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod109f779b_e5dc_4c5c_910b_5ddc2c9ec3e1.slice/crio-def3b68efad02b092e71ed3b8828b86ba2c19805c46958892ff33965e554b5bd WatchSource:0}: Error finding container def3b68efad02b092e71ed3b8828b86ba2c19805c46958892ff33965e554b5bd: Status 404 returned error can't find the container with id def3b68efad02b092e71ed3b8828b86ba2c19805c46958892ff33965e554b5bd Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.094124 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nfrdd" event={"ID":"ad61d5b3-e5d1-4fdb-bb1d-6e046c06818a","Type":"ContainerStarted","Data":"a4d7d1e58bfe553ab6d4fd96b3f8474c320d9360b60783a15d0068c271d94dce"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.132143 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" event={"ID":"025e2d7e-ab23-4cf6-8d4c-b114ca8733b7","Type":"ContainerStarted","Data":"f8769d654d6f7b4f10c94cef737fe1b076a8a22baf18753b326aa2fc438fd4c9"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.132198 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" event={"ID":"025e2d7e-ab23-4cf6-8d4c-b114ca8733b7","Type":"ContainerStarted","Data":"96797509332a3e60e00594a172cabae501a93048867de9dd21eb2af4b41dc29b"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.159142 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" event={"ID":"5ba20b9a-0c04-416f-963f-610d9be4cef1","Type":"ContainerStarted","Data":"6d5e4505b7f3fa530db8ff0d28bbf147f5b7220ec9a98ea4ab9cf5aa34da2c17"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.160369 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:42 crc kubenswrapper[4707]: E1204 09:40:42.160686 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:42.660667934 +0000 UTC m=+142.096490441 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.160754 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:42 crc kubenswrapper[4707]: E1204 09:40:42.161539 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:42.661530752 +0000 UTC m=+142.097353259 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.162591 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hcnc5" podStartSLOduration=121.162567496 podStartE2EDuration="2m1.162567496s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:42.160151217 +0000 UTC m=+141.595973724" watchObservedRunningTime="2025-12-04 09:40:42.162567496 +0000 UTC m=+141.598390013" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.162811 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" event={"ID":"9186ac86-6e42-41af-a520-839e71f4c41c","Type":"ContainerStarted","Data":"1f63db10157b0d2925246fcab3d703650d13dc9635fd785b40cbc5dd9fbf85a5"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.179660 4707 generic.go:334] "Generic (PLEG): container finished" podID="7bce0373-35c7-420e-a6cf-7f5bc2d1bba6" containerID="d0c65a03e175230fa86e2ea7ec98ff2fba20065615aa447767436d054302550a" exitCode=0 Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.179728 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" event={"ID":"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6","Type":"ContainerDied","Data":"d0c65a03e175230fa86e2ea7ec98ff2fba20065615aa447767436d054302550a"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.188624 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-g8flk" event={"ID":"d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d","Type":"ContainerStarted","Data":"82ce71ad15790f34fd47c6df8574402212e285f6a8b754edf96fabb175a7b8fa"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.189638 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" podStartSLOduration=121.189626688 podStartE2EDuration="2m1.189626688s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:42.188203342 +0000 UTC m=+141.624025859" watchObservedRunningTime="2025-12-04 09:40:42.189626688 +0000 UTC m=+141.625449195" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.195646 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-8hd98" event={"ID":"7647af53-61ba-409d-90c4-25d6ee0a022d","Type":"ContainerStarted","Data":"5d6336c3f824e12d0b534fe50d305c4f5b7b5bbe605488bbacf68f8a67712a9c"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.208387 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k"] Dec 04 09:40:42 crc 
kubenswrapper[4707]: I1204 09:40:42.221429 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" event={"ID":"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5","Type":"ContainerStarted","Data":"098aac17f1fa672b4c1b140b19a8fc7008b65c3bf92f7ddd47f221bf2b4cff29"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.223235 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" event={"ID":"3bd5f0ac-77dd-4a9e-8343-6df2079e0355","Type":"ContainerStarted","Data":"3fb3618cc8e41fb131b6cd0676ea6224efbeecb6916e4fb4e270f1a7054fd619"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.228081 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9kn65" podStartSLOduration=121.228061191 podStartE2EDuration="2m1.228061191s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:42.220722382 +0000 UTC m=+141.656544889" watchObservedRunningTime="2025-12-04 09:40:42.228061191 +0000 UTC m=+141.663883698" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.236837 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" event={"ID":"9e9960ba-c1e3-4209-a7d3-ff88ccf73c39","Type":"ContainerStarted","Data":"170d13576e3eeeef64843b1d0e3fdace3820aea00da32f974c3027b1340eee96"} Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.240459 4707 patch_prober.go:28] interesting pod/downloads-7954f5f757-g47v2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.240500 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-g47v2" podUID="0436692e-40df-4130-8bd1-2059aeeeac11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.250673 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.262024 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:42 crc kubenswrapper[4707]: E1204 09:40:42.266879 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:42.766855187 +0000 UTC m=+142.202677844 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.282127 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zd7sv" podStartSLOduration=121.282110804 podStartE2EDuration="2m1.282110804s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:42.280619826 +0000 UTC m=+141.716442333" watchObservedRunningTime="2025-12-04 09:40:42.282110804 +0000 UTC m=+141.717933311" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.329493 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-8hd98" podStartSLOduration=121.329470909 podStartE2EDuration="2m1.329470909s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:42.316091603 +0000 UTC m=+141.751914110" watchObservedRunningTime="2025-12-04 09:40:42.329470909 +0000 UTC m=+141.765293416" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.331986 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2"] Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.367903 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:42 crc kubenswrapper[4707]: E1204 09:40:42.372615 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:42.872601166 +0000 UTC m=+142.308423673 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.391008 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-g8flk" podStartSLOduration=7.390992195 podStartE2EDuration="7.390992195s" podCreationTimestamp="2025-12-04 09:40:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:42.348746887 +0000 UTC m=+141.784569394" watchObservedRunningTime="2025-12-04 09:40:42.390992195 +0000 UTC m=+141.826814702" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.392561 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc"] Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.425199 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4"] Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.468961 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:42 crc kubenswrapper[4707]: E1204 09:40:42.469606 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:42.969591438 +0000 UTC m=+142.405413945 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.484055 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ncmcs"] Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.491494 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-854p6" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.573778 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:42 crc kubenswrapper[4707]: E1204 09:40:42.582822 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:43.082804021 +0000 UTC m=+142.518626538 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.626908 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.638619 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:42 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:42 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:42 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.638690 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.682922 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:42 crc 
kubenswrapper[4707]: E1204 09:40:42.683187 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:43.183173754 +0000 UTC m=+142.618996261 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.792919 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:42 crc kubenswrapper[4707]: E1204 09:40:42.793646 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:43.293630566 +0000 UTC m=+142.729453073 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.893991 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:42 crc kubenswrapper[4707]: E1204 09:40:42.894433 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:43.394415492 +0000 UTC m=+142.830238009 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:42 crc kubenswrapper[4707]: I1204 09:40:42.995356 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:42 crc kubenswrapper[4707]: E1204 09:40:42.995670 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:43.495658284 +0000 UTC m=+142.931480791 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.103855 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:43 crc kubenswrapper[4707]: E1204 09:40:43.104404 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:43.604360269 +0000 UTC m=+143.040182776 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.211130 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:43 crc kubenswrapper[4707]: E1204 09:40:43.211476 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:43.711463862 +0000 UTC m=+143.147286359 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.268323 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj" event={"ID":"7528abe2-fb27-4c14-88c6-98fcbb716395","Type":"ContainerStarted","Data":"011c0849f71a43b8cd4cf2e855f1dd5ca68c190711c78e1c97076aa69ef5648c"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.268380 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj" event={"ID":"7528abe2-fb27-4c14-88c6-98fcbb716395","Type":"ContainerStarted","Data":"60a94b66e3a5872a5890bda25ce5945538e5584008f563c4aa4c4fe45bd18cf7"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.270143 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" event={"ID":"06984eba-18ca-42bf-bcd0-787f28f91d4b","Type":"ContainerStarted","Data":"b66ef283d4d7762af840dc2295de8f2555ba23023db1545af62ddc756d83c76d"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.271698 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" event={"ID":"aaf87e66-a7cb-4692-be61-67acc2b09236","Type":"ContainerStarted","Data":"0581f307f4ee01ed11103807743887bc3fb9732582333998c4bb33fb216fb937"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.274253 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-g8flk" event={"ID":"d82fac8c-2d84-4a9b-a8ea-9ba9f3e3346d","Type":"ContainerStarted","Data":"ec4ed507780f0fc8aab505d1baa73efeb7a0859296d1e7e91f3f356e5fc1499f"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.275769 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" event={"ID":"bd08c265-693b-4ca4-986d-45fa202caca7","Type":"ContainerStarted","Data":"6cceb679b014732f6efbfd1fc2a910ad4b88833657da27d3faad2f53a9bebf3b"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.279496 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" event={"ID":"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1","Type":"ContainerStarted","Data":"def3b68efad02b092e71ed3b8828b86ba2c19805c46958892ff33965e554b5bd"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.286909 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7th45" event={"ID":"53f6fd9a-495b-4e68-9f43-6788ff997184","Type":"ContainerStarted","Data":"fef7b3e91b2fe2d7d5caf893b656d2e742c15bc3bf5d5ac363f02e4821eea426"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.294158 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dwmkj" podStartSLOduration=122.294144969 podStartE2EDuration="2m2.294144969s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:43.292179184 +0000 UTC m=+142.728001691" watchObservedRunningTime="2025-12-04 09:40:43.294144969 +0000 UTC m=+142.729967476" Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.307600 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" event={"ID":"3bd5f0ac-77dd-4a9e-8343-6df2079e0355","Type":"ContainerStarted","Data":"18de2f793779546034ae19850e82155354b134935f4aec3051afd649dc3145d3"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.312739 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:43 crc kubenswrapper[4707]: E1204 09:40:43.313145 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:43.813129498 +0000 UTC m=+143.248952005 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.314425 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" event={"ID":"8e0c0140-161e-4110-999f-3c8c60d481cc","Type":"ContainerStarted","Data":"cf817a1f5ede3bad5ffe92d33ca4a05fb0aafed281495df4b128cbc3c71d4134"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.332813 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" event={"ID":"f0411515-ac32-4ad1-a956-ce737c8d0d75","Type":"ContainerStarted","Data":"acb5917138225493b6dc86fff796d9f3bd60c524fcab69f88ca29a5f1bc19238"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.376636 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" event={"ID":"e7f326c3-8717-44a8-af72-556a8fdbce22","Type":"ContainerStarted","Data":"54fcfcd9f674d4f779ad82b98b46de1c45dbc517cb84f33cf27a69e2e22ee669"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.406572 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" event={"ID":"c41b5f5f-8be6-43a7-99ef-d9b26f8da6f5","Type":"ContainerStarted","Data":"150facab0fe53f26d240f4c6a7bcb74899bc20cc623926210b634b96a4bd470b"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.421551 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:43 crc kubenswrapper[4707]: E1204 09:40:43.422900 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:43.922882097 +0000 UTC m=+143.358704804 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.449958 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" podStartSLOduration=122.449928339 podStartE2EDuration="2m2.449928339s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:43.352932776 +0000 UTC m=+142.788755283" watchObservedRunningTime="2025-12-04 09:40:43.449928339 +0000 UTC m=+142.885750846" Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.454018 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" event={"ID":"085c976a-20a2-410c-b37f-1fc34eb733da","Type":"ContainerStarted","Data":"1c98c48189989bfdf2c96aabeda9427852e6d7959d409740cf50b6d788869b85"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.486502 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" event={"ID":"2868c6d2-127b-41f0-8d2a-c602d45c339f","Type":"ContainerStarted","Data":"8798ea19653be18caf5fda12e367cd04c84b624436180af03b956562651f2f99"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.516603 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-9847h" podStartSLOduration=122.516557442 podStartE2EDuration="2m2.516557442s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:43.453153084 +0000 UTC m=+142.888975591" watchObservedRunningTime="2025-12-04 09:40:43.516557442 +0000 UTC m=+142.952379949" Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.523160 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:43 crc kubenswrapper[4707]: E1204 09:40:43.524702 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.024679417 +0000 UTC m=+143.460501924 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.529620 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-82bkj" event={"ID":"5ba20b9a-0c04-416f-963f-610d9be4cef1","Type":"ContainerStarted","Data":"fc1c04846d863c44ca17e41842dffe6dfa8468053e85a987ff26788ee8793d32"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.572761 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" event={"ID":"060fcdca-499b-4e84-81a2-144a175efe6f","Type":"ContainerStarted","Data":"b3b98bdce3cf2fd7faf5a880cb4579c201f3f7a8ab98e4bf6740a04c483de9e3"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.599322 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" event={"ID":"67f753e3-95ec-46e4-bc29-efe016b6e3f7","Type":"ContainerStarted","Data":"440fb1ede72c99477d166d7f1597979501df413a5f0f06d332dc84f4835e4a41"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.626403 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:43 crc kubenswrapper[4707]: E1204 09:40:43.626694 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.126682094 +0000 UTC m=+143.562504601 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.629682 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" event={"ID":"232e4fba-ab9d-46de-9d0b-7311ddd1bcab","Type":"ContainerStarted","Data":"75bb696e7af10ac72306d92d1ac711b056f7a0e4beab367f3f0ec64c379818a7"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.640784 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:43 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:43 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:43 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.640831 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.664676 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9xnqw" event={"ID":"d7614ad2-ca20-4d63-9a65-40a20bc74c8c","Type":"ContainerStarted","Data":"ff287cc5e66923d3a81a843c5fc907928bf3d58a13ee6cd8ce4c2603ee01833b"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.664724 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9xnqw" event={"ID":"d7614ad2-ca20-4d63-9a65-40a20bc74c8c","Type":"ContainerStarted","Data":"312ef5ca6d3e47fc65997ea5f730e0a172c5af042399614fc51b4b96f4270c5c"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.698123 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8vksb" podStartSLOduration=122.698104532 podStartE2EDuration="2m2.698104532s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:43.518501586 +0000 UTC m=+142.954324093" watchObservedRunningTime="2025-12-04 09:40:43.698104532 +0000 UTC m=+143.133927039" Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.724097 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" event={"ID":"ee5d7413-ce83-4601-9e30-13ad2d2d1768","Type":"ContainerStarted","Data":"d620d0d91cab8ea023f851b1b85925103f69b7e4bb4b98d62b819d5f4eabdf08"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.724154 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" 
event={"ID":"ee5d7413-ce83-4601-9e30-13ad2d2d1768","Type":"ContainerStarted","Data":"b1bf93bd30fb85cafe068098b767ff8804735e88bce0dd49fb8e670a1c7bd3de"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.727924 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:43 crc kubenswrapper[4707]: E1204 09:40:43.729114 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.229098583 +0000 UTC m=+143.664921090 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.769124 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-9xnqw" podStartSLOduration=8.769106878 podStartE2EDuration="8.769106878s" podCreationTimestamp="2025-12-04 09:40:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:43.699651793 +0000 UTC m=+143.135474310" watchObservedRunningTime="2025-12-04 09:40:43.769106878 +0000 UTC m=+143.204929375" Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.781268 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" event={"ID":"035089c9-d1b0-465a-93eb-ec137a57d79c","Type":"ContainerStarted","Data":"280b52362d7d55514b4a54a1ac131dbbf47c638346fa2dd730cc184cabb9778f"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.786374 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6n9t7" event={"ID":"8cedd2a3-4056-4c09-b1ab-a9596cff261b","Type":"ContainerStarted","Data":"196928bae471c24959580fc5266989ab30ea70c647c421f875744d2aea0c3db6"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.787381 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" event={"ID":"af82926e-f43a-4d57-a5ad-edb6bef1a719","Type":"ContainerStarted","Data":"24d93b58dd13c708aedfe65034eedd1a11d84214977301fac7a5d0bf66682bf5"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.822897 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" event={"ID":"916e5756-f645-44f7-b26d-706a87c57ed8","Type":"ContainerStarted","Data":"5249534dae1a114a662aa6a69f7a7a05a231d80d84ce2896c2def18f8847a7d0"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.824365 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 
09:40:43.829757 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-b6bzf" podStartSLOduration=122.829733065 podStartE2EDuration="2m2.829733065s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:43.771465915 +0000 UTC m=+143.207288422" watchObservedRunningTime="2025-12-04 09:40:43.829733065 +0000 UTC m=+143.265555602" Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.832400 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:43 crc kubenswrapper[4707]: E1204 09:40:43.832669 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.33265422 +0000 UTC m=+143.768476727 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.865070 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh6kq" podStartSLOduration=122.865053057 podStartE2EDuration="2m2.865053057s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:43.824693721 +0000 UTC m=+143.260516228" watchObservedRunningTime="2025-12-04 09:40:43.865053057 +0000 UTC m=+143.300875564" Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.866021 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" event={"ID":"f0669320-7b9a-49e6-b24a-23e8ae1c4051","Type":"ContainerStarted","Data":"c799c863f4ff9bd369112fb6edb7ae8200b7dab713e7a34b4883d769445bf239"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.902572 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k" event={"ID":"822c86b6-e71b-471c-a0bc-1537af9e7c36","Type":"ContainerStarted","Data":"446ed306e16c9abc0b3258c1408606741469d97521a017f19bc6a40c89a0cf7f"} Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.904630 4707 patch_prober.go:28] interesting pod/downloads-7954f5f757-g47v2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.923715 4707 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-console/downloads-7954f5f757-g47v2" podUID="0436692e-40df-4130-8bd1-2059aeeeac11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 04 09:40:43 crc kubenswrapper[4707]: I1204 09:40:43.952437 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:43 crc kubenswrapper[4707]: E1204 09:40:43.952878 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.45286077 +0000 UTC m=+143.888683277 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.058474 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.064140 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.56412418 +0000 UTC m=+143.999946687 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.160664 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.161086 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.66104891 +0000 UTC m=+144.096871417 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.161185 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.161470 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.661461074 +0000 UTC m=+144.097283581 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.262550 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.263399 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.763381027 +0000 UTC m=+144.199203534 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.371050 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.371412 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.87140067 +0000 UTC m=+144.307223177 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.472050 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.472230 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.972204208 +0000 UTC m=+144.408026715 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.472379 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.472686 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:44.972679013 +0000 UTC m=+144.408501520 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.573487 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.573661 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.073636846 +0000 UTC m=+144.509459353 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.573842 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.574166 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.074157972 +0000 UTC m=+144.509980479 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.640697 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:44 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:44 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:44 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.640791 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.648101 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.675245 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.675466 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.175440015 +0000 UTC m=+144.611262522 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.675731 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.676048 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.176031775 +0000 UTC m=+144.611854282 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.688464 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" podStartSLOduration=123.6884464 podStartE2EDuration="2m3.6884464s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:43.865840312 +0000 UTC m=+143.301662819" watchObservedRunningTime="2025-12-04 09:40:44.6884464 +0000 UTC m=+144.124268907" Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.698373 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.776505 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.776992 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.276947006 +0000 UTC m=+144.712769513 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.777155 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.777601 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.277588787 +0000 UTC m=+144.713411294 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.883141 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.883982 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.383951726 +0000 UTC m=+144.819774233 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.884301 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.884616 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.384608517 +0000 UTC m=+144.820431024 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.949801 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" event={"ID":"06984eba-18ca-42bf-bcd0-787f28f91d4b","Type":"ContainerStarted","Data":"f0f7fc15c873f234edeea8b8bf4d8f7ae79cf18b83df13239bfecb1c4a3f58d1"} Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.951685 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.981650 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.981951 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" event={"ID":"af82926e-f43a-4d57-a5ad-edb6bef1a719","Type":"ContainerStarted","Data":"c262fc24a1ba705ebf9f73ade54be24105b3539923c333cc35ff29d1e67c5bc7"} Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.982814 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.985311 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:44 crc kubenswrapper[4707]: E1204 09:40:44.986092 4707 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.486061945 +0000 UTC m=+144.921884452 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.990827 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-d58h4" podStartSLOduration=123.990808491 podStartE2EDuration="2m3.990808491s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:44.988875057 +0000 UTC m=+144.424697554" watchObservedRunningTime="2025-12-04 09:40:44.990808491 +0000 UTC m=+144.426630998" Dec 04 09:40:44 crc kubenswrapper[4707]: I1204 09:40:44.999238 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.015773 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" event={"ID":"67f753e3-95ec-46e4-bc29-efe016b6e3f7","Type":"ContainerStarted","Data":"3d41f3bfa3fe18d6688d1b7e1d650a24ccfd925522be3b9cde692ecaccaadcf0"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.015831 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" event={"ID":"67f753e3-95ec-46e4-bc29-efe016b6e3f7","Type":"ContainerStarted","Data":"16b5ee97daa1b4ec02f77499004a55ce3ad0c9f651d17842aa6f529097b41074"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.037815 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k" event={"ID":"822c86b6-e71b-471c-a0bc-1537af9e7c36","Type":"ContainerStarted","Data":"2196bc9e64c5b56a0f258ec9137c74d1ca5f1e4aa6fb158ad859cb47bbfe2e7b"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.037872 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k" event={"ID":"822c86b6-e71b-471c-a0bc-1537af9e7c36","Type":"ContainerStarted","Data":"0eafdeb1fe9136de8337f6c26ac411642c3068fb6fe34b3e8489aef78a6a4009"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.038322 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hkxr2" podStartSLOduration=124.038304059 podStartE2EDuration="2m4.038304059s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.035731176 +0000 UTC m=+144.471553673" watchObservedRunningTime="2025-12-04 09:40:45.038304059 +0000 UTC m=+144.474126566" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.073882 
4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" event={"ID":"e7f326c3-8717-44a8-af72-556a8fdbce22","Type":"ContainerStarted","Data":"4744470097bf8a4d1c62923d1b8d54e86a450da7d8b8b8910340a5ff8abed3da"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.089474 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.090696 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.590680127 +0000 UTC m=+145.026502824 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.102894 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7th45" event={"ID":"53f6fd9a-495b-4e68-9f43-6788ff997184","Type":"ContainerStarted","Data":"d23bbe5b073b7832662188de255806d913d6c7c9a5bc8a96517cde0622a7f206"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.135800 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" event={"ID":"2868c6d2-127b-41f0-8d2a-c602d45c339f","Type":"ContainerStarted","Data":"6e630beea3439ce6bc7ca0b0ac611c3cdc8bc63ca98e097fc62960a4158e868c"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.166383 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" event={"ID":"f0669320-7b9a-49e6-b24a-23e8ae1c4051","Type":"ContainerStarted","Data":"1326139f0fd43a4965b9909a0d2f72a3c19c8601f88afdc5e630eef9a7eda776"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.184278 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rsq9z" podStartSLOduration=124.18426358 podStartE2EDuration="2m4.18426358s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.183833435 +0000 UTC m=+144.619655942" watchObservedRunningTime="2025-12-04 09:40:45.18426358 +0000 UTC m=+144.620086087" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.197108 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:45 crc kubenswrapper[4707]: 
E1204 09:40:45.197385 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.697367147 +0000 UTC m=+145.133189654 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.197931 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.697913805 +0000 UTC m=+145.133736312 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.197500 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.199024 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" event={"ID":"060fcdca-499b-4e84-81a2-144a175efe6f","Type":"ContainerStarted","Data":"d38e09e3299890d2b00aaaff78355f3ba82e15318b277c7643bcd8b9c56acff2"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.199551 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.221600 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" event={"ID":"f0411515-ac32-4ad1-a956-ce737c8d0d75","Type":"ContainerStarted","Data":"5ea293cf21194305a675510f4b4c7e7f0097cc2919231a8170612e9c0145805f"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.222154 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.234229 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7287k" podStartSLOduration=124.234208728 podStartE2EDuration="2m4.234208728s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.231757888 +0000 UTC m=+144.667580395" watchObservedRunningTime="2025-12-04 09:40:45.234208728 +0000 UTC m=+144.670031235" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.248162 4707 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-ncmcs container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.248217 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" podUID="f0411515-ac32-4ad1-a956-ce737c8d0d75" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.253572 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" event={"ID":"3bd5f0ac-77dd-4a9e-8343-6df2079e0355","Type":"ContainerStarted","Data":"8a0781efc0b9253036c49952a2e29a44b32b86f841ac8abb7619e97f76241b08"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.299961 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.300594 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.800570512 +0000 UTC m=+145.236393029 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.301100 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.303297 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" event={"ID":"7bce0373-35c7-420e-a6cf-7f5bc2d1bba6","Type":"ContainerStarted","Data":"2ba559c7f46e1d57a3346298256ad3f6be3cfa980dd740c3664777ad1b739b02"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.303595 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.304034 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.803999064 +0000 UTC m=+145.239821571 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.318526 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-rkq6j" podStartSLOduration=124.318509308 podStartE2EDuration="2m4.318509308s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.271487623 +0000 UTC m=+144.707310130" watchObservedRunningTime="2025-12-04 09:40:45.318509308 +0000 UTC m=+144.754331815" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.341973 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" event={"ID":"8e0c0140-161e-4110-999f-3c8c60d481cc","Type":"ContainerStarted","Data":"532f5517f474cb7e7f36db526dc1012bb9178fca4fdc28914fa1e0d233b13dbb"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.351443 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" event={"ID":"232e4fba-ab9d-46de-9d0b-7311ddd1bcab","Type":"ContainerStarted","Data":"f9b5655f078f56e8e2a05a8398b5f1ff2e96afc9c710e5242cc4347453970304"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.352116 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.355413 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-7th45" podStartSLOduration=124.35539289 podStartE2EDuration="2m4.35539289s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.318508928 +0000 UTC m=+144.754331455" watchObservedRunningTime="2025-12-04 09:40:45.35539289 +0000 UTC m=+144.791215397" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.356751 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-lrm44" podStartSLOduration=124.356734634 podStartE2EDuration="2m4.356734634s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.354568883 +0000 UTC m=+144.790391390" watchObservedRunningTime="2025-12-04 09:40:45.356734634 +0000 UTC m=+144.792557141" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.361858 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" event={"ID":"bd08c265-693b-4ca4-986d-45fa202caca7","Type":"ContainerStarted","Data":"fffa0d1cb970bb7eff9bac1e2ce282e678a0f1ccee5c88b928daf3f7d9d2b664"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.396018 4707 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" podStartSLOduration=124.396000654 podStartE2EDuration="2m4.396000654s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.394890138 +0000 UTC m=+144.830712655" watchObservedRunningTime="2025-12-04 09:40:45.396000654 +0000 UTC m=+144.831823161" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.407863 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.408429 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.908410168 +0000 UTC m=+145.344232695 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.408526 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.408031 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" event={"ID":"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1","Type":"ContainerStarted","Data":"d56192023c3786e4a0a996f48568c68fdda895ba4bcf0d3f7268cdda2fe3e9ee"} Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.409249 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:45.909241966 +0000 UTC m=+145.345064473 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.434361 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-lwb96" podStartSLOduration=124.434346235 podStartE2EDuration="2m4.434346235s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.432997901 +0000 UTC m=+144.868820408" watchObservedRunningTime="2025-12-04 09:40:45.434346235 +0000 UTC m=+144.870168742" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.444688 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6n9t7" event={"ID":"8cedd2a3-4056-4c09-b1ab-a9596cff261b","Type":"ContainerStarted","Data":"21a672f118dcf94893b00f1322cb65af65cac14e09817daa183892f5ab319891"} Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.444732 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.501214 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" podStartSLOduration=124.501196675 podStartE2EDuration="2m4.501196675s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.464148737 +0000 UTC m=+144.899971244" watchObservedRunningTime="2025-12-04 09:40:45.501196675 +0000 UTC m=+144.937019182" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.511939 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.513363 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.013348111 +0000 UTC m=+145.449170618 (durationBeforeRetry 500ms). 
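The pod_startup_latency_tracker entries throughout this stretch all report roughly the same figure (about 2m4s) because these control-plane pods were created at 09:38:41 and only observed running around 09:40:45: podStartE2EDuration is effectively observedRunningTime minus podCreationTimestamp, and firstStartedPulling/lastFinishedPulling are the zero time because no image pull was needed. A minimal arithmetic check with values copied from the machine-config-operator entry above (plain Go, an illustration rather than kubelet code):
package main
import (
	"fmt"
	"time"
)
func main() {
	const layout = "2006-01-02 15:04:05 -0700 MST"
	// Timestamps copied from the machine-config-operator-74547568cd-lwb96 entry.
	created, err := time.Parse(layout, "2025-12-04 09:38:41 +0000 UTC")
	if err != nil {
		panic(err)
	}
	running, err := time.Parse(layout, "2025-12-04 09:40:45.432997901 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints 2m4.432997901s, matching the reported podStartE2EDuration of ~2m4.43s
	// up to the tracker's own, slightly later clock reading.
	fmt.Println(running.Sub(created))
}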
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.540465 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" podStartSLOduration=124.540442324 podStartE2EDuration="2m4.540442324s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.501688291 +0000 UTC m=+144.937510798" watchObservedRunningTime="2025-12-04 09:40:45.540442324 +0000 UTC m=+144.976264831" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.541565 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-gfbcs" podStartSLOduration=124.541557471 podStartE2EDuration="2m4.541557471s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.540940781 +0000 UTC m=+144.976763278" watchObservedRunningTime="2025-12-04 09:40:45.541557471 +0000 UTC m=+144.977379978" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.614872 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.617432 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.117416205 +0000 UTC m=+145.553238712 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.621771 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" podStartSLOduration=124.621751086 podStartE2EDuration="2m4.621751086s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.575576711 +0000 UTC m=+145.011399228" watchObservedRunningTime="2025-12-04 09:40:45.621751086 +0000 UTC m=+145.057573603" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.622153 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5j2vc" podStartSLOduration=124.622144669 podStartE2EDuration="2m4.622144669s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.620121583 +0000 UTC m=+145.055944100" watchObservedRunningTime="2025-12-04 09:40:45.622144669 +0000 UTC m=+145.057967176" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.637916 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:45 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:45 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:45 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.637992 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.652191 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-6n9t7" podStartSLOduration=10.652167138 podStartE2EDuration="10.652167138s" podCreationTimestamp="2025-12-04 09:40:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:45.650310898 +0000 UTC m=+145.086133425" watchObservedRunningTime="2025-12-04 09:40:45.652167138 +0000 UTC m=+145.087989645" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.687234 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" podStartSLOduration=124.687213581 podStartE2EDuration="2m4.687213581s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 
09:40:45.68502664 +0000 UTC m=+145.120849157" watchObservedRunningTime="2025-12-04 09:40:45.687213581 +0000 UTC m=+145.123036088" Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.715963 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.716155 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.216131015 +0000 UTC m=+145.651953522 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.716327 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.716705 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.216698192 +0000 UTC m=+145.652520699 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.816852 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.817028 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.316998534 +0000 UTC m=+145.752821041 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:45 crc kubenswrapper[4707]: I1204 09:40:45.918402 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:45 crc kubenswrapper[4707]: E1204 09:40:45.918811 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.418789333 +0000 UTC m=+145.854612030 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.019744 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.019961 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.519935272 +0000 UTC m=+145.955757779 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.020112 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.020572 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.520556452 +0000 UTC m=+145.956379099 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.121224 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.121483 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.621440122 +0000 UTC m=+146.057262629 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.121873 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.122371 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.622357262 +0000 UTC m=+146.058179769 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.200488 4707 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-hm9cc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.200599 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" podUID="060fcdca-499b-4e84-81a2-144a175efe6f" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.222699 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.222980 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.722925872 +0000 UTC m=+146.158748379 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.223094 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.223471 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.723452349 +0000 UTC m=+146.159275046 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.295179 4707 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.324439 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.324633 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.824602127 +0000 UTC m=+146.260424634 (durationBeforeRetry 500ms). 
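The plugin_watcher entry just above ("Adding socket path ... kubevirt.io.hostpath-provisioner-reg.sock") is the turning point for the long mount/unmount retry loop: the hostpath CSI driver's registration socket has finally appeared under /var/lib/kubelet/plugins_registry, and the csi_plugin entries a little further down show the kubelet validating and registering the driver, after which the pending operations for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 can succeed. A trivial sketch of waiting for that socket to show up (an illustration only; the path is from the log, the 500ms poll mirrors durationBeforeRetry, and the real kubelet plugin watcher reacts to filesystem events rather than polling):
package main
import (
	"fmt"
	"os"
	"time"
)
func main() {
	// Socket path from the plugin_watcher entry above.
	const sock = "/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
	for {
		if _, err := os.Stat(sock); err == nil {
			fmt.Println("registration socket present:", sock)
			return
		}
		// Poll at the same 500ms cadence the volume manager uses between retries.
		time.Sleep(500 * time.Millisecond)
	}
}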
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.325012 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.325581 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.825560599 +0000 UTC m=+146.261383106 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.425839 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.425992 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.925969964 +0000 UTC m=+146.361792471 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.426104 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.426432 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:46.926417669 +0000 UTC m=+146.362240316 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.477165 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" event={"ID":"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1","Type":"ContainerDied","Data":"d56192023c3786e4a0a996f48568c68fdda895ba4bcf0d3f7268cdda2fe3e9ee"} Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.477821 4707 generic.go:334] "Generic (PLEG): container finished" podID="109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1" containerID="d56192023c3786e4a0a996f48568c68fdda895ba4bcf0d3f7268cdda2fe3e9ee" exitCode=0 Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.483708 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gcf45"] Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.484936 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.487274 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.488040 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" event={"ID":"2868c6d2-127b-41f0-8d2a-c602d45c339f","Type":"ContainerStarted","Data":"07ebe9fb551597f929e70c183e790038a47300205f1d3a38b21c522d88a54ef9"} Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.488095 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" event={"ID":"2868c6d2-127b-41f0-8d2a-c602d45c339f","Type":"ContainerStarted","Data":"ce7f5668df767d100d54b67fdb8c4e3f2e3abe8e43a248ff691201936b62abbb"} Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.489253 4707 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-ncmcs container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.489306 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" podUID="f0411515-ac32-4ad1-a956-ce737c8d0d75" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.494322 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hm9cc" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.499093 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n4mnv" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.515579 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gcf45"] Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.527257 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.527551 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 09:40:47.027514636 +0000 UTC m=+146.463337143 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.532128 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48dnx\" (UniqueName: \"kubernetes.io/projected/aad5050f-90b9-4364-9dc7-c32892d674d0-kube-api-access-48dnx\") pod \"certified-operators-gcf45\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.532269 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-utilities\") pod \"certified-operators-gcf45\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.532678 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:46 crc kubenswrapper[4707]: E1204 09:40:46.548236 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 09:40:47.04821679 +0000 UTC m=+146.484039307 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n74dl" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.561892 4707 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-04T09:40:46.29522257Z","Handler":null,"Name":""} Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.574286 4707 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.574346 4707 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.585673 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-catalog-content\") pod \"certified-operators-gcf45\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.631997 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:46 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:46 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:46 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.632047 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.696980 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.697216 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-catalog-content\") pod \"certified-operators-gcf45\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.697251 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.697306 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48dnx\" (UniqueName: \"kubernetes.io/projected/aad5050f-90b9-4364-9dc7-c32892d674d0-kube-api-access-48dnx\") pod \"certified-operators-gcf45\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.697487 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qrqx7"] Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.697859 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-utilities\") pod \"certified-operators-gcf45\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.698555 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.699914 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-utilities\") pod \"certified-operators-gcf45\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.700033 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.700657 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.706782 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.710562 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-catalog-content\") pod \"certified-operators-gcf45\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.717133 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.733945 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.753368 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48dnx\" (UniqueName: \"kubernetes.io/projected/aad5050f-90b9-4364-9dc7-c32892d674d0-kube-api-access-48dnx\") pod \"certified-operators-gcf45\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.769608 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qrqx7"] Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.803445 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-utilities\") pod \"community-operators-qrqx7\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.803545 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.803588 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.803632 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.803675 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-catalog-content\") pod \"community-operators-qrqx7\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.803711 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-8hdx7\" (UniqueName: \"kubernetes.io/projected/2ffc2219-3702-4f09-9511-145919595de9-kube-api-access-8hdx7\") pod \"community-operators-qrqx7\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.807477 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.807848 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.812520 4707 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.812555 4707 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.812892 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.857241 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.869486 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.876575 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.888207 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cd4dt"] Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.891345 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.899797 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n74dl\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.904199 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cd4dt"] Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.905902 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-catalog-content\") pod \"community-operators-qrqx7\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.905936 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hdx7\" (UniqueName: \"kubernetes.io/projected/2ffc2219-3702-4f09-9511-145919595de9-kube-api-access-8hdx7\") pod \"community-operators-qrqx7\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.905981 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-utilities\") pod \"community-operators-qrqx7\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.907126 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-catalog-content\") pod \"community-operators-qrqx7\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.917894 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-utilities\") pod \"community-operators-qrqx7\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.937379 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hdx7\" (UniqueName: \"kubernetes.io/projected/2ffc2219-3702-4f09-9511-145919595de9-kube-api-access-8hdx7\") pod \"community-operators-qrqx7\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:40:46 crc kubenswrapper[4707]: I1204 09:40:46.975442 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.002747 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.008034 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-catalog-content\") pod \"certified-operators-cd4dt\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.008124 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcvwf\" (UniqueName: \"kubernetes.io/projected/48677f6e-8c16-480b-aad8-d87ffe093fca-kube-api-access-vcvwf\") pod \"certified-operators-cd4dt\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.008172 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-utilities\") pod \"certified-operators-cd4dt\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.077548 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.109609 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcvwf\" (UniqueName: \"kubernetes.io/projected/48677f6e-8c16-480b-aad8-d87ffe093fca-kube-api-access-vcvwf\") pod \"certified-operators-cd4dt\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.109672 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-utilities\") pod \"certified-operators-cd4dt\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.109746 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-catalog-content\") pod \"certified-operators-cd4dt\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.110887 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-catalog-content\") pod \"certified-operators-cd4dt\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.111164 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-utilities\") pod \"certified-operators-cd4dt\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.116360 4707 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qmkrh"] Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.117459 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.130488 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qmkrh"] Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.160105 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcvwf\" (UniqueName: \"kubernetes.io/projected/48677f6e-8c16-480b-aad8-d87ffe093fca-kube-api-access-vcvwf\") pod \"certified-operators-cd4dt\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.211121 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-catalog-content\") pod \"community-operators-qmkrh\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.211570 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvljf\" (UniqueName: \"kubernetes.io/projected/9ad7168e-fdaa-4830-b423-e981f1640f15-kube-api-access-zvljf\") pod \"community-operators-qmkrh\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.211614 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-utilities\") pod \"community-operators-qmkrh\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.218672 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gcf45"] Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.218909 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.314148 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvljf\" (UniqueName: \"kubernetes.io/projected/9ad7168e-fdaa-4830-b423-e981f1640f15-kube-api-access-zvljf\") pod \"community-operators-qmkrh\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.314423 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-utilities\") pod \"community-operators-qmkrh\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.314446 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-catalog-content\") pod \"community-operators-qmkrh\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.314877 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-catalog-content\") pod \"community-operators-qmkrh\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.315158 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-utilities\") pod \"community-operators-qmkrh\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.337471 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvljf\" (UniqueName: \"kubernetes.io/projected/9ad7168e-fdaa-4830-b423-e981f1640f15-kube-api-access-zvljf\") pod \"community-operators-qmkrh\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.487813 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.516806 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n74dl"] Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.520949 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" event={"ID":"2868c6d2-127b-41f0-8d2a-c602d45c339f","Type":"ContainerStarted","Data":"0dfb7964e1dd64b00120ad3768689fa43cf391f171de908c344fc48b0f026d92"} Dec 04 09:40:47 crc kubenswrapper[4707]: W1204 09:40:47.539545 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44b581fc_38bf_4c33_820c_f27a4a730932.slice/crio-6ead001147904c65853d22c7744a4e52fe2b4eee2476ac07e40076809d8fe86d WatchSource:0}: Error finding container 6ead001147904c65853d22c7744a4e52fe2b4eee2476ac07e40076809d8fe86d: Status 404 returned error can't find the container with id 6ead001147904c65853d22c7744a4e52fe2b4eee2476ac07e40076809d8fe86d Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.552405 4707 generic.go:334] "Generic (PLEG): container finished" podID="aad5050f-90b9-4364-9dc7-c32892d674d0" containerID="1f1c2a4ca770a1414b95e79c232af81f65a4e62af622a3945b3ebbafba969e63" exitCode=0 Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.553027 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcf45" event={"ID":"aad5050f-90b9-4364-9dc7-c32892d674d0","Type":"ContainerDied","Data":"1f1c2a4ca770a1414b95e79c232af81f65a4e62af622a3945b3ebbafba969e63"} Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.553065 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcf45" event={"ID":"aad5050f-90b9-4364-9dc7-c32892d674d0","Type":"ContainerStarted","Data":"3a6952e310aebcea7e41d7576c896aae1be2425ab6457b73017ca8e5c7a2e477"} Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.555825 4707 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.604741 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-z9vbh" podStartSLOduration=12.604721225 podStartE2EDuration="12.604721225s" podCreationTimestamp="2025-12-04 09:40:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:47.578770559 +0000 UTC m=+147.014593066" watchObservedRunningTime="2025-12-04 09:40:47.604721225 +0000 UTC m=+147.040543732" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.638144 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:47 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:47 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:47 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.638188 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" 
output="HTTP probe failed with statuscode: 500" Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.766281 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cd4dt"] Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.775060 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qrqx7"] Dec 04 09:40:47 crc kubenswrapper[4707]: I1204 09:40:47.957244 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.035353 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-secret-volume\") pod \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.035432 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt9t4\" (UniqueName: \"kubernetes.io/projected/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-kube-api-access-nt9t4\") pod \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.035465 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-config-volume\") pod \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\" (UID: \"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1\") " Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.036599 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-config-volume" (OuterVolumeSpecName: "config-volume") pod "109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1" (UID: "109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.044981 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1" (UID: "109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.048980 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-kube-api-access-nt9t4" (OuterVolumeSpecName: "kube-api-access-nt9t4") pod "109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1" (UID: "109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1"). InnerVolumeSpecName "kube-api-access-nt9t4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.049567 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qmkrh"] Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.136571 4707 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.136624 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt9t4\" (UniqueName: \"kubernetes.io/projected/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-kube-api-access-nt9t4\") on node \"crc\" DevicePath \"\"" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.136647 4707 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 09:40:48 crc kubenswrapper[4707]: W1204 09:40:48.140377 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ad7168e_fdaa_4830_b423_e981f1640f15.slice/crio-7a8d539cf644523ac18b5756478ad23f7df6a9aaecfc5554c3050dbf21b2c874 WatchSource:0}: Error finding container 7a8d539cf644523ac18b5756478ad23f7df6a9aaecfc5554c3050dbf21b2c874: Status 404 returned error can't find the container with id 7a8d539cf644523ac18b5756478ad23f7df6a9aaecfc5554c3050dbf21b2c874 Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.227704 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.227798 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.227940 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.228958 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.229647 4707 patch_prober.go:28] interesting pod/console-f9d7485db-56ncv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.229691 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-56ncv" podUID="3b305e45-7c10-459e-b8ed-1192baa0b469" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.230705 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.230766 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.231093 4707 patch_prober.go:28] interesting pod/downloads-7954f5f757-g47v2 
container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.231126 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-g47v2" podUID="0436692e-40df-4130-8bd1-2059aeeeac11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.232028 4707 patch_prober.go:28] interesting pod/downloads-7954f5f757-g47v2 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.232091 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-g47v2" podUID="0436692e-40df-4130-8bd1-2059aeeeac11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.240222 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.240433 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.350498 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 04 09:40:48 crc kubenswrapper[4707]: E1204 09:40:48.351118 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1" containerName="collect-profiles" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.351162 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1" containerName="collect-profiles" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.351298 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1" containerName="collect-profiles" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.351863 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.355449 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.355667 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.366863 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.446766 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/56f00256-911d-4475-a732-8929370dd596-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"56f00256-911d-4475-a732-8929370dd596\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.446970 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56f00256-911d-4475-a732-8929370dd596-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"56f00256-911d-4475-a732-8929370dd596\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.476114 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-57t67"] Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.477437 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.482486 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.487201 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-57t67"] Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.554227 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56f00256-911d-4475-a732-8929370dd596-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"56f00256-911d-4475-a732-8929370dd596\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.554311 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/56f00256-911d-4475-a732-8929370dd596-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"56f00256-911d-4475-a732-8929370dd596\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.554377 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-catalog-content\") pod \"redhat-marketplace-57t67\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.554410 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z99dc\" (UniqueName: 
\"kubernetes.io/projected/dee25bc0-3766-43d6-8dde-8d316c48bd04-kube-api-access-z99dc\") pod \"redhat-marketplace-57t67\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.554439 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-utilities\") pod \"redhat-marketplace-57t67\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.554846 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/56f00256-911d-4475-a732-8929370dd596-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"56f00256-911d-4475-a732-8929370dd596\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.567625 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"b3a9e40a987b05f1cc32466c4ec2557a9f5821e7063f9e476f601f3e1eccbac4"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.567677 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"02eb68057fac5f4c87d49ade183e1e7aa679c5d10cdbc1938fca979b6c28443a"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.589686 4707 generic.go:334] "Generic (PLEG): container finished" podID="2ffc2219-3702-4f09-9511-145919595de9" containerID="f71212486bf82a515b68ed0139d0f6904f1aad9af266d6294399968a198b4be3" exitCode=0 Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.590839 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrqx7" event={"ID":"2ffc2219-3702-4f09-9511-145919595de9","Type":"ContainerDied","Data":"f71212486bf82a515b68ed0139d0f6904f1aad9af266d6294399968a198b4be3"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.590874 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrqx7" event={"ID":"2ffc2219-3702-4f09-9511-145919595de9","Type":"ContainerStarted","Data":"861beda45abce154c5955d4ad718c75a36b2a3e3d6d0ddb2c0fcd8ea272c59bd"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.600984 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56f00256-911d-4475-a732-8929370dd596-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"56f00256-911d-4475-a732-8929370dd596\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.604446 4707 generic.go:334] "Generic (PLEG): container finished" podID="48677f6e-8c16-480b-aad8-d87ffe093fca" containerID="8f2bf6398dd1c674a546fe3cd1f89f838cea8da10c0a9e1063765824451ebe46" exitCode=0 Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.604559 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cd4dt" event={"ID":"48677f6e-8c16-480b-aad8-d87ffe093fca","Type":"ContainerDied","Data":"8f2bf6398dd1c674a546fe3cd1f89f838cea8da10c0a9e1063765824451ebe46"} 
Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.604594 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cd4dt" event={"ID":"48677f6e-8c16-480b-aad8-d87ffe093fca","Type":"ContainerStarted","Data":"99ed92869884146e5a1025d2c434183f2571dd9641d5e7147704599273637e88"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.609968 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ebb8b52c4ed45cfb290b52a4dcbaaba75074e67c2806a5d4bf28d81feaca96a3"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.610262 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8b8abfc3bc6ccdfef45a93e7a0e4d8869e9e6134227989d8963861f8a9217e5c"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.620637 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"abbbfdb825201cc947aad6d093d955e6a9780c6863b90f54ba429c5ddfd165fb"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.623187 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"ee07cc59d64a5269d61afae382f948e72e0da94f33733c987e8e049bbba10e7b"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.623996 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.631490 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.637132 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:48 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:48 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:48 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.637194 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.642876 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" event={"ID":"44b581fc-38bf-4c33-820c-f27a4a730932","Type":"ContainerStarted","Data":"7279ac8ca0db88564285e726136750e8fb46a021023072d398b9e25ac77c186e"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.642934 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" 
event={"ID":"44b581fc-38bf-4c33-820c-f27a4a730932","Type":"ContainerStarted","Data":"6ead001147904c65853d22c7744a4e52fe2b4eee2476ac07e40076809d8fe86d"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.643756 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.649946 4707 generic.go:334] "Generic (PLEG): container finished" podID="9ad7168e-fdaa-4830-b423-e981f1640f15" containerID="7b6f551b73eac7c0f462fb6104fb5fafa41f25d3f7b8c94cb4208b9b3afd56e4" exitCode=0 Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.650029 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmkrh" event={"ID":"9ad7168e-fdaa-4830-b423-e981f1640f15","Type":"ContainerDied","Data":"7b6f551b73eac7c0f462fb6104fb5fafa41f25d3f7b8c94cb4208b9b3afd56e4"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.650060 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmkrh" event={"ID":"9ad7168e-fdaa-4830-b423-e981f1640f15","Type":"ContainerStarted","Data":"7a8d539cf644523ac18b5756478ad23f7df6a9aaecfc5554c3050dbf21b2c874"} Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.655388 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-catalog-content\") pod \"redhat-marketplace-57t67\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.655467 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z99dc\" (UniqueName: \"kubernetes.io/projected/dee25bc0-3766-43d6-8dde-8d316c48bd04-kube-api-access-z99dc\") pod \"redhat-marketplace-57t67\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.655992 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-utilities\") pod \"redhat-marketplace-57t67\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.657644 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-catalog-content\") pod \"redhat-marketplace-57t67\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.657795 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-utilities\") pod \"redhat-marketplace-57t67\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.657845 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" event={"ID":"109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1","Type":"ContainerDied","Data":"def3b68efad02b092e71ed3b8828b86ba2c19805c46958892ff33965e554b5bd"} Dec 04 09:40:48 
crc kubenswrapper[4707]: I1204 09:40:48.657878 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="def3b68efad02b092e71ed3b8828b86ba2c19805c46958892ff33965e554b5bd" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.661541 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.670474 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-chj9t" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.670774 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-7th45" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.690423 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z99dc\" (UniqueName: \"kubernetes.io/projected/dee25bc0-3766-43d6-8dde-8d316c48bd04-kube-api-access-z99dc\") pod \"redhat-marketplace-57t67\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.690986 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.715172 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" podStartSLOduration=127.715153358 podStartE2EDuration="2m7.715153358s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:48.711807209 +0000 UTC m=+148.147629716" watchObservedRunningTime="2025-12-04 09:40:48.715153358 +0000 UTC m=+148.150975865" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.803542 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.891824 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vrkjm"] Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.893479 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.912033 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrkjm"] Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.968291 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-catalog-content\") pod \"redhat-marketplace-vrkjm\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.968430 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrnpf\" (UniqueName: \"kubernetes.io/projected/4e905028-4918-4514-a35a-74f753b746ab-kube-api-access-vrnpf\") pod \"redhat-marketplace-vrkjm\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:40:48 crc kubenswrapper[4707]: I1204 09:40:48.968528 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-utilities\") pod \"redhat-marketplace-vrkjm\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.070493 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-utilities\") pod \"redhat-marketplace-vrkjm\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.070646 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-catalog-content\") pod \"redhat-marketplace-vrkjm\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.070750 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrnpf\" (UniqueName: \"kubernetes.io/projected/4e905028-4918-4514-a35a-74f753b746ab-kube-api-access-vrnpf\") pod \"redhat-marketplace-vrkjm\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.071081 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-utilities\") pod \"redhat-marketplace-vrkjm\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.071194 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-catalog-content\") pod \"redhat-marketplace-vrkjm\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.102478 4707 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vrnpf\" (UniqueName: \"kubernetes.io/projected/4e905028-4918-4514-a35a-74f753b746ab-kube-api-access-vrnpf\") pod \"redhat-marketplace-vrkjm\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.120739 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 04 09:40:49 crc kubenswrapper[4707]: W1204 09:40:49.146813 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod56f00256_911d_4475_a732_8929370dd596.slice/crio-11c228ed8caef0aabd43c41a809e8310fc7a89d91dd98e82ef2231f5d91b59b7 WatchSource:0}: Error finding container 11c228ed8caef0aabd43c41a809e8310fc7a89d91dd98e82ef2231f5d91b59b7: Status 404 returned error can't find the container with id 11c228ed8caef0aabd43c41a809e8310fc7a89d91dd98e82ef2231f5d91b59b7 Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.225768 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-57t67"] Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.237456 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.360867 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.635627 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:49 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:49 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:49 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.636162 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.682023 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wnkkq"] Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.683654 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.686309 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.692106 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"56f00256-911d-4475-a732-8929370dd596","Type":"ContainerStarted","Data":"11c228ed8caef0aabd43c41a809e8310fc7a89d91dd98e82ef2231f5d91b59b7"} Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.702831 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wnkkq"] Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.709793 4707 generic.go:334] "Generic (PLEG): container finished" podID="dee25bc0-3766-43d6-8dde-8d316c48bd04" containerID="b70f6efeca4b03e6e62398fd2c26c3dd7ac0ce54b2b6106db3c26828ad2f7d0c" exitCode=0 Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.710052 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57t67" event={"ID":"dee25bc0-3766-43d6-8dde-8d316c48bd04","Type":"ContainerDied","Data":"b70f6efeca4b03e6e62398fd2c26c3dd7ac0ce54b2b6106db3c26828ad2f7d0c"} Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.710121 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57t67" event={"ID":"dee25bc0-3766-43d6-8dde-8d316c48bd04","Type":"ContainerStarted","Data":"4c9fecffa592a2c4290ac9cebeb8f8d472207e3b4fb61e18e50599b3f7e41db7"} Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.756797 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrkjm"] Dec 04 09:40:49 crc kubenswrapper[4707]: W1204 09:40:49.772209 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e905028_4918_4514_a35a_74f753b746ab.slice/crio-a77c8ebd9220c34c37215d40c1592621a3cfa42974349100457794a876a782f8 WatchSource:0}: Error finding container a77c8ebd9220c34c37215d40c1592621a3cfa42974349100457794a876a782f8: Status 404 returned error can't find the container with id a77c8ebd9220c34c37215d40c1592621a3cfa42974349100457794a876a782f8 Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.804846 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68w7p\" (UniqueName: \"kubernetes.io/projected/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-kube-api-access-68w7p\") pod \"redhat-operators-wnkkq\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.804900 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-catalog-content\") pod \"redhat-operators-wnkkq\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.805113 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-utilities\") pod \"redhat-operators-wnkkq\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " 
pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.906260 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-utilities\") pod \"redhat-operators-wnkkq\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.906325 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68w7p\" (UniqueName: \"kubernetes.io/projected/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-kube-api-access-68w7p\") pod \"redhat-operators-wnkkq\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.906367 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-catalog-content\") pod \"redhat-operators-wnkkq\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.906747 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-catalog-content\") pod \"redhat-operators-wnkkq\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.907759 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-utilities\") pod \"redhat-operators-wnkkq\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:40:49 crc kubenswrapper[4707]: I1204 09:40:49.931772 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68w7p\" (UniqueName: \"kubernetes.io/projected/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-kube-api-access-68w7p\") pod \"redhat-operators-wnkkq\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.031056 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.078286 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5t5hm"] Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.079275 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.098296 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5t5hm"] Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.210130 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-catalog-content\") pod \"redhat-operators-5t5hm\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.210555 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fwb9\" (UniqueName: \"kubernetes.io/projected/9bca5523-13da-4a24-8436-3780fe794fd4-kube-api-access-9fwb9\") pod \"redhat-operators-5t5hm\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.210603 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-utilities\") pod \"redhat-operators-5t5hm\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.312376 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fwb9\" (UniqueName: \"kubernetes.io/projected/9bca5523-13da-4a24-8436-3780fe794fd4-kube-api-access-9fwb9\") pod \"redhat-operators-5t5hm\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.312452 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-utilities\") pod \"redhat-operators-5t5hm\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.312535 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-catalog-content\") pod \"redhat-operators-5t5hm\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.313509 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-catalog-content\") pod \"redhat-operators-5t5hm\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.313680 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-utilities\") pod \"redhat-operators-5t5hm\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.338790 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9fwb9\" (UniqueName: \"kubernetes.io/projected/9bca5523-13da-4a24-8436-3780fe794fd4-kube-api-access-9fwb9\") pod \"redhat-operators-5t5hm\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.366022 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wnkkq"] Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.409299 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.631433 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:50 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:50 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:50 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.631862 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.671472 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.682326 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.685362 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.687302 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.687468 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.724247 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bd10c7d6-74d9-4609-bdc3-01bb62ef242a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.724310 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bd10c7d6-74d9-4609-bdc3-01bb62ef242a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.755723 4707 generic.go:334] "Generic (PLEG): container finished" podID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerID="b42536a3c54bcff747a011758f65f926a400d3165d9ec1cb05b63244bb603518" exitCode=0 Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.755829 4707 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnkkq" event={"ID":"579805ef-aec3-4ea7-b5af-bdf514c7eb1f","Type":"ContainerDied","Data":"b42536a3c54bcff747a011758f65f926a400d3165d9ec1cb05b63244bb603518"} Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.755881 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnkkq" event={"ID":"579805ef-aec3-4ea7-b5af-bdf514c7eb1f","Type":"ContainerStarted","Data":"d73e742e0c49807694b5b1340419b92b6d824c9db2d7d8d696f261fba97d4835"} Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.782351 4707 generic.go:334] "Generic (PLEG): container finished" podID="4e905028-4918-4514-a35a-74f753b746ab" containerID="904c2ea2582cec75413d57d6ddc0a39b85f526c0277c35f36f2f1a5e95d19f49" exitCode=0 Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.782494 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrkjm" event={"ID":"4e905028-4918-4514-a35a-74f753b746ab","Type":"ContainerDied","Data":"904c2ea2582cec75413d57d6ddc0a39b85f526c0277c35f36f2f1a5e95d19f49"} Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.782533 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrkjm" event={"ID":"4e905028-4918-4514-a35a-74f753b746ab","Type":"ContainerStarted","Data":"a77c8ebd9220c34c37215d40c1592621a3cfa42974349100457794a876a782f8"} Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.790750 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"56f00256-911d-4475-a732-8929370dd596","Type":"ContainerStarted","Data":"3fa838470fabab184523125cc0d1fd8e6746a7b49854153126be28efe6400f73"} Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.825601 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bd10c7d6-74d9-4609-bdc3-01bb62ef242a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.825700 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bd10c7d6-74d9-4609-bdc3-01bb62ef242a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.826441 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bd10c7d6-74d9-4609-bdc3-01bb62ef242a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.826629 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.826604897 podStartE2EDuration="2.826604897s" podCreationTimestamp="2025-12-04 09:40:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:50.823745763 +0000 UTC m=+150.259568280" watchObservedRunningTime="2025-12-04 09:40:50.826604897 +0000 UTC m=+150.262427404" Dec 04 
09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.839842 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5t5hm"] Dec 04 09:40:50 crc kubenswrapper[4707]: I1204 09:40:50.844169 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bd10c7d6-74d9-4609-bdc3-01bb62ef242a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 09:40:50 crc kubenswrapper[4707]: W1204 09:40:50.889444 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9bca5523_13da_4a24_8436_3780fe794fd4.slice/crio-813048458fb6a78807fc3f14e4214a0c6ff28c3b391f7ab24b6717a8d369997c WatchSource:0}: Error finding container 813048458fb6a78807fc3f14e4214a0c6ff28c3b391f7ab24b6717a8d369997c: Status 404 returned error can't find the container with id 813048458fb6a78807fc3f14e4214a0c6ff28c3b391f7ab24b6717a8d369997c Dec 04 09:40:51 crc kubenswrapper[4707]: I1204 09:40:51.033227 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 09:40:51 crc kubenswrapper[4707]: I1204 09:40:51.357161 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 04 09:40:51 crc kubenswrapper[4707]: I1204 09:40:51.631468 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:51 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:51 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:51 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:51 crc kubenswrapper[4707]: I1204 09:40:51.632047 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:51 crc kubenswrapper[4707]: I1204 09:40:51.798422 4707 generic.go:334] "Generic (PLEG): container finished" podID="56f00256-911d-4475-a732-8929370dd596" containerID="3fa838470fabab184523125cc0d1fd8e6746a7b49854153126be28efe6400f73" exitCode=0 Dec 04 09:40:51 crc kubenswrapper[4707]: I1204 09:40:51.798482 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"56f00256-911d-4475-a732-8929370dd596","Type":"ContainerDied","Data":"3fa838470fabab184523125cc0d1fd8e6746a7b49854153126be28efe6400f73"} Dec 04 09:40:51 crc kubenswrapper[4707]: I1204 09:40:51.801551 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5t5hm" event={"ID":"9bca5523-13da-4a24-8436-3780fe794fd4","Type":"ContainerStarted","Data":"c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8"} Dec 04 09:40:51 crc kubenswrapper[4707]: I1204 09:40:51.801580 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5t5hm" event={"ID":"9bca5523-13da-4a24-8436-3780fe794fd4","Type":"ContainerStarted","Data":"813048458fb6a78807fc3f14e4214a0c6ff28c3b391f7ab24b6717a8d369997c"} Dec 04 09:40:51 crc 
kubenswrapper[4707]: I1204 09:40:51.807751 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bd10c7d6-74d9-4609-bdc3-01bb62ef242a","Type":"ContainerStarted","Data":"c81d448e903ee11e8972d41b6703ecad92bf18a8962caf7e6d17cf3039bf944b"} Dec 04 09:40:52 crc kubenswrapper[4707]: I1204 09:40:52.629657 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:52 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:52 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:52 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:52 crc kubenswrapper[4707]: I1204 09:40:52.629928 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:52 crc kubenswrapper[4707]: I1204 09:40:52.816737 4707 generic.go:334] "Generic (PLEG): container finished" podID="9bca5523-13da-4a24-8436-3780fe794fd4" containerID="c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8" exitCode=0 Dec 04 09:40:52 crc kubenswrapper[4707]: I1204 09:40:52.816790 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5t5hm" event={"ID":"9bca5523-13da-4a24-8436-3780fe794fd4","Type":"ContainerDied","Data":"c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8"} Dec 04 09:40:52 crc kubenswrapper[4707]: I1204 09:40:52.819398 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bd10c7d6-74d9-4609-bdc3-01bb62ef242a","Type":"ContainerStarted","Data":"25f1cd6c6cf56e983598148489585ef25aeba801225eb1ab12c0a22246ee1455"} Dec 04 09:40:52 crc kubenswrapper[4707]: I1204 09:40:52.916196 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.916180302 podStartE2EDuration="2.916180302s" podCreationTimestamp="2025-12-04 09:40:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:40:52.913021779 +0000 UTC m=+152.348844286" watchObservedRunningTime="2025-12-04 09:40:52.916180302 +0000 UTC m=+152.352002809" Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.186790 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.270903 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/56f00256-911d-4475-a732-8929370dd596-kubelet-dir\") pod \"56f00256-911d-4475-a732-8929370dd596\" (UID: \"56f00256-911d-4475-a732-8929370dd596\") " Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.271236 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/56f00256-911d-4475-a732-8929370dd596-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "56f00256-911d-4475-a732-8929370dd596" (UID: "56f00256-911d-4475-a732-8929370dd596"). 
InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.271058 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56f00256-911d-4475-a732-8929370dd596-kube-api-access\") pod \"56f00256-911d-4475-a732-8929370dd596\" (UID: \"56f00256-911d-4475-a732-8929370dd596\") " Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.272833 4707 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/56f00256-911d-4475-a732-8929370dd596-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.280348 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56f00256-911d-4475-a732-8929370dd596-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "56f00256-911d-4475-a732-8929370dd596" (UID: "56f00256-911d-4475-a732-8929370dd596"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.374707 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56f00256-911d-4475-a732-8929370dd596-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.631044 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:53 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:53 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:53 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.631454 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.834158 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"56f00256-911d-4475-a732-8929370dd596","Type":"ContainerDied","Data":"11c228ed8caef0aabd43c41a809e8310fc7a89d91dd98e82ef2231f5d91b59b7"} Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.834214 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11c228ed8caef0aabd43c41a809e8310fc7a89d91dd98e82ef2231f5d91b59b7" Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.834220 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.836721 4707 generic.go:334] "Generic (PLEG): container finished" podID="bd10c7d6-74d9-4609-bdc3-01bb62ef242a" containerID="25f1cd6c6cf56e983598148489585ef25aeba801225eb1ab12c0a22246ee1455" exitCode=0 Dec 04 09:40:53 crc kubenswrapper[4707]: I1204 09:40:53.836785 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bd10c7d6-74d9-4609-bdc3-01bb62ef242a","Type":"ContainerDied","Data":"25f1cd6c6cf56e983598148489585ef25aeba801225eb1ab12c0a22246ee1455"} Dec 04 09:40:54 crc kubenswrapper[4707]: I1204 09:40:54.098626 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-6n9t7" Dec 04 09:40:54 crc kubenswrapper[4707]: I1204 09:40:54.630209 4707 patch_prober.go:28] interesting pod/router-default-5444994796-8hd98 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 09:40:54 crc kubenswrapper[4707]: [-]has-synced failed: reason withheld Dec 04 09:40:54 crc kubenswrapper[4707]: [+]process-running ok Dec 04 09:40:54 crc kubenswrapper[4707]: healthz check failed Dec 04 09:40:54 crc kubenswrapper[4707]: I1204 09:40:54.630281 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8hd98" podUID="7647af53-61ba-409d-90c4-25d6ee0a022d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.086751 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.204887 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kubelet-dir\") pod \"bd10c7d6-74d9-4609-bdc3-01bb62ef242a\" (UID: \"bd10c7d6-74d9-4609-bdc3-01bb62ef242a\") " Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.204994 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kube-api-access\") pod \"bd10c7d6-74d9-4609-bdc3-01bb62ef242a\" (UID: \"bd10c7d6-74d9-4609-bdc3-01bb62ef242a\") " Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.204992 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "bd10c7d6-74d9-4609-bdc3-01bb62ef242a" (UID: "bd10c7d6-74d9-4609-bdc3-01bb62ef242a"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.205545 4707 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.210530 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "bd10c7d6-74d9-4609-bdc3-01bb62ef242a" (UID: "bd10c7d6-74d9-4609-bdc3-01bb62ef242a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.307431 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd10c7d6-74d9-4609-bdc3-01bb62ef242a-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.629654 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.633852 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-8hd98" Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.854015 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.854283 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bd10c7d6-74d9-4609-bdc3-01bb62ef242a","Type":"ContainerDied","Data":"c81d448e903ee11e8972d41b6703ecad92bf18a8962caf7e6d17cf3039bf944b"} Dec 04 09:40:55 crc kubenswrapper[4707]: I1204 09:40:55.854405 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c81d448e903ee11e8972d41b6703ecad92bf18a8962caf7e6d17cf3039bf944b" Dec 04 09:40:58 crc kubenswrapper[4707]: I1204 09:40:58.241778 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-g47v2" Dec 04 09:40:58 crc kubenswrapper[4707]: I1204 09:40:58.374724 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:40:58 crc kubenswrapper[4707]: I1204 09:40:58.378404 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-56ncv" Dec 04 09:41:00 crc kubenswrapper[4707]: I1204 09:41:00.817408 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:41:00 crc kubenswrapper[4707]: I1204 09:41:00.818049 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:41:03 crc kubenswrapper[4707]: I1204 09:41:03.434649 4707 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:41:03 crc kubenswrapper[4707]: I1204 09:41:03.440384 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a8009fd-d652-44fb-8ef1-73078262e8fa-metrics-certs\") pod \"network-metrics-daemon-txkn2\" (UID: \"9a8009fd-d652-44fb-8ef1-73078262e8fa\") " pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:41:03 crc kubenswrapper[4707]: I1204 09:41:03.467378 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-txkn2" Dec 04 09:41:07 crc kubenswrapper[4707]: I1204 09:41:07.010677 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:41:16 crc kubenswrapper[4707]: E1204 09:41:16.815570 4707 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 04 09:41:16 crc kubenswrapper[4707]: E1204 09:41:16.816277 4707 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zvljf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-qmkrh_openshift-marketplace(9ad7168e-fdaa-4830-b423-e981f1640f15): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 04 09:41:16 crc kubenswrapper[4707]: E1204 09:41:16.817762 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying 
system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-qmkrh" podUID="9ad7168e-fdaa-4830-b423-e981f1640f15" Dec 04 09:41:16 crc kubenswrapper[4707]: E1204 09:41:16.836974 4707 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 04 09:41:16 crc kubenswrapper[4707]: E1204 09:41:16.837108 4707 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8hdx7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-qrqx7_openshift-marketplace(2ffc2219-3702-4f09-9511-145919595de9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 04 09:41:16 crc kubenswrapper[4707]: E1204 09:41:16.838441 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-qrqx7" podUID="2ffc2219-3702-4f09-9511-145919595de9" Dec 04 09:41:18 crc kubenswrapper[4707]: E1204 09:41:18.176764 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-qmkrh" podUID="9ad7168e-fdaa-4830-b423-e981f1640f15" Dec 04 09:41:18 crc kubenswrapper[4707]: E1204 09:41:18.176842 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-qrqx7" 
podUID="2ffc2219-3702-4f09-9511-145919595de9" Dec 04 09:41:18 crc kubenswrapper[4707]: E1204 09:41:18.255976 4707 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 04 09:41:18 crc kubenswrapper[4707]: E1204 09:41:18.256194 4707 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-48dnx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-gcf45_openshift-marketplace(aad5050f-90b9-4364-9dc7-c32892d674d0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 04 09:41:18 crc kubenswrapper[4707]: E1204 09:41:18.257458 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-gcf45" podUID="aad5050f-90b9-4364-9dc7-c32892d674d0" Dec 04 09:41:18 crc kubenswrapper[4707]: E1204 09:41:18.285421 4707 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 04 09:41:18 crc kubenswrapper[4707]: E1204 09:41:18.285602 4707 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vcvwf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-cd4dt_openshift-marketplace(48677f6e-8c16-480b-aad8-d87ffe093fca): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 04 09:41:18 crc kubenswrapper[4707]: E1204 09:41:18.286999 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-cd4dt" podUID="48677f6e-8c16-480b-aad8-d87ffe093fca" Dec 04 09:41:19 crc kubenswrapper[4707]: I1204 09:41:19.097604 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vn2v9" Dec 04 09:41:19 crc kubenswrapper[4707]: E1204 09:41:19.309860 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-gcf45" podUID="aad5050f-90b9-4364-9dc7-c32892d674d0" Dec 04 09:41:19 crc kubenswrapper[4707]: E1204 09:41:19.310408 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cd4dt" podUID="48677f6e-8c16-480b-aad8-d87ffe093fca" Dec 04 09:41:19 crc kubenswrapper[4707]: E1204 09:41:19.404273 4707 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 04 09:41:19 crc kubenswrapper[4707]: E1204 09:41:19.404455 4707 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z99dc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-57t67_openshift-marketplace(dee25bc0-3766-43d6-8dde-8d316c48bd04): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 04 09:41:19 crc kubenswrapper[4707]: E1204 09:41:19.407417 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-57t67" podUID="dee25bc0-3766-43d6-8dde-8d316c48bd04" Dec 04 09:41:19 crc kubenswrapper[4707]: I1204 09:41:19.729922 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-txkn2"] Dec 04 09:41:22 crc kubenswrapper[4707]: E1204 09:41:22.224371 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-57t67" podUID="dee25bc0-3766-43d6-8dde-8d316c48bd04" Dec 04 09:41:22 crc kubenswrapper[4707]: W1204 09:41:22.261357 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a8009fd_d652_44fb_8ef1_73078262e8fa.slice/crio-75cb0870e9102e165075b72e631b2ac6cdcbf067cb205701bf6bd9b3e22216a9 WatchSource:0}: Error finding container 75cb0870e9102e165075b72e631b2ac6cdcbf067cb205701bf6bd9b3e22216a9: Status 404 returned error can't find the container with id 75cb0870e9102e165075b72e631b2ac6cdcbf067cb205701bf6bd9b3e22216a9 Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.751995 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 04 09:41:22 crc kubenswrapper[4707]: E1204 09:41:22.752670 4707 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="bd10c7d6-74d9-4609-bdc3-01bb62ef242a" containerName="pruner" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.752687 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd10c7d6-74d9-4609-bdc3-01bb62ef242a" containerName="pruner" Dec 04 09:41:22 crc kubenswrapper[4707]: E1204 09:41:22.752716 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56f00256-911d-4475-a732-8929370dd596" containerName="pruner" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.752724 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="56f00256-911d-4475-a732-8929370dd596" containerName="pruner" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.752853 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd10c7d6-74d9-4609-bdc3-01bb62ef242a" containerName="pruner" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.752876 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="56f00256-911d-4475-a732-8929370dd596" containerName="pruner" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.753289 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.756999 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.757159 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.761946 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.813980 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/307adffc-65cf-4d47-867f-a96c2970a7d8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"307adffc-65cf-4d47-867f-a96c2970a7d8\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.814084 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/307adffc-65cf-4d47-867f-a96c2970a7d8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"307adffc-65cf-4d47-867f-a96c2970a7d8\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.915248 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/307adffc-65cf-4d47-867f-a96c2970a7d8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"307adffc-65cf-4d47-867f-a96c2970a7d8\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.915367 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/307adffc-65cf-4d47-867f-a96c2970a7d8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"307adffc-65cf-4d47-867f-a96c2970a7d8\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.915466 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/307adffc-65cf-4d47-867f-a96c2970a7d8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"307adffc-65cf-4d47-867f-a96c2970a7d8\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 09:41:22 crc kubenswrapper[4707]: I1204 09:41:22.938126 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/307adffc-65cf-4d47-867f-a96c2970a7d8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"307adffc-65cf-4d47-867f-a96c2970a7d8\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 09:41:23 crc kubenswrapper[4707]: I1204 09:41:23.030586 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5t5hm" event={"ID":"9bca5523-13da-4a24-8436-3780fe794fd4","Type":"ContainerStarted","Data":"02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503"} Dec 04 09:41:23 crc kubenswrapper[4707]: I1204 09:41:23.037979 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-txkn2" event={"ID":"9a8009fd-d652-44fb-8ef1-73078262e8fa","Type":"ContainerStarted","Data":"ca6beec16e772095d8a85347c5707d8a591fffc2f2c84a36d033e02963f6aa7c"} Dec 04 09:41:23 crc kubenswrapper[4707]: I1204 09:41:23.038239 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-txkn2" event={"ID":"9a8009fd-d652-44fb-8ef1-73078262e8fa","Type":"ContainerStarted","Data":"6133e32a645d104e6dc7afa81c3186b3658b1f8295998cd43ba8e703a78012fe"} Dec 04 09:41:23 crc kubenswrapper[4707]: I1204 09:41:23.038313 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-txkn2" event={"ID":"9a8009fd-d652-44fb-8ef1-73078262e8fa","Type":"ContainerStarted","Data":"75cb0870e9102e165075b72e631b2ac6cdcbf067cb205701bf6bd9b3e22216a9"} Dec 04 09:41:23 crc kubenswrapper[4707]: I1204 09:41:23.049922 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnkkq" event={"ID":"579805ef-aec3-4ea7-b5af-bdf514c7eb1f","Type":"ContainerStarted","Data":"db8b70e59fb9d66a9f4832e51aa6134572c47ca7aa524a82d38f71924f66d909"} Dec 04 09:41:23 crc kubenswrapper[4707]: I1204 09:41:23.053099 4707 generic.go:334] "Generic (PLEG): container finished" podID="4e905028-4918-4514-a35a-74f753b746ab" containerID="4f84f67f7d557311a8aeff6de60aef5816546a29e3321632dcb293933d453636" exitCode=0 Dec 04 09:41:23 crc kubenswrapper[4707]: I1204 09:41:23.053131 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrkjm" event={"ID":"4e905028-4918-4514-a35a-74f753b746ab","Type":"ContainerDied","Data":"4f84f67f7d557311a8aeff6de60aef5816546a29e3321632dcb293933d453636"} Dec 04 09:41:23 crc kubenswrapper[4707]: I1204 09:41:23.084243 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-txkn2" podStartSLOduration=162.084219277 podStartE2EDuration="2m42.084219277s" podCreationTimestamp="2025-12-04 09:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:41:23.079591786 +0000 UTC m=+182.515414303" watchObservedRunningTime="2025-12-04 09:41:23.084219277 +0000 UTC m=+182.520041784" Dec 04 09:41:23 crc kubenswrapper[4707]: I1204 09:41:23.084701 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 09:41:23 crc kubenswrapper[4707]: I1204 09:41:23.517839 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 04 09:41:24 crc kubenswrapper[4707]: I1204 09:41:24.062525 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrkjm" event={"ID":"4e905028-4918-4514-a35a-74f753b746ab","Type":"ContainerStarted","Data":"cca69eacc00aa82b06b87f244cde54a9a17c6675562f50ef21790f33f27ae992"} Dec 04 09:41:24 crc kubenswrapper[4707]: I1204 09:41:24.065082 4707 generic.go:334] "Generic (PLEG): container finished" podID="9bca5523-13da-4a24-8436-3780fe794fd4" containerID="02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503" exitCode=0 Dec 04 09:41:24 crc kubenswrapper[4707]: I1204 09:41:24.065137 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5t5hm" event={"ID":"9bca5523-13da-4a24-8436-3780fe794fd4","Type":"ContainerDied","Data":"02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503"} Dec 04 09:41:24 crc kubenswrapper[4707]: I1204 09:41:24.067623 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"307adffc-65cf-4d47-867f-a96c2970a7d8","Type":"ContainerStarted","Data":"fe0498e9d4fd1623a892c55cf926c41d8aeb1b5db87ad206bd6ee9b3a647e34a"} Dec 04 09:41:24 crc kubenswrapper[4707]: I1204 09:41:24.067686 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"307adffc-65cf-4d47-867f-a96c2970a7d8","Type":"ContainerStarted","Data":"2375b2416f5596f8cea46f7c715953fa3b8a5e870f9ed5bfd2debfce55aa5d06"} Dec 04 09:41:24 crc kubenswrapper[4707]: I1204 09:41:24.071824 4707 generic.go:334] "Generic (PLEG): container finished" podID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerID="db8b70e59fb9d66a9f4832e51aa6134572c47ca7aa524a82d38f71924f66d909" exitCode=0 Dec 04 09:41:24 crc kubenswrapper[4707]: I1204 09:41:24.072551 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnkkq" event={"ID":"579805ef-aec3-4ea7-b5af-bdf514c7eb1f","Type":"ContainerDied","Data":"db8b70e59fb9d66a9f4832e51aa6134572c47ca7aa524a82d38f71924f66d909"} Dec 04 09:41:24 crc kubenswrapper[4707]: I1204 09:41:24.079531 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vrkjm" podStartSLOduration=3.253727196 podStartE2EDuration="36.079516325s" podCreationTimestamp="2025-12-04 09:40:48 +0000 UTC" firstStartedPulling="2025-12-04 09:40:50.785667882 +0000 UTC m=+150.221490389" lastFinishedPulling="2025-12-04 09:41:23.611457011 +0000 UTC m=+183.047279518" observedRunningTime="2025-12-04 09:41:24.077531871 +0000 UTC m=+183.513354378" watchObservedRunningTime="2025-12-04 09:41:24.079516325 +0000 UTC m=+183.515338832" Dec 04 09:41:24 crc kubenswrapper[4707]: I1204 09:41:24.120413 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.120398899 podStartE2EDuration="2.120398899s" podCreationTimestamp="2025-12-04 09:41:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:41:24.100753148 +0000 UTC m=+183.536575665" watchObservedRunningTime="2025-12-04 09:41:24.120398899 +0000 
UTC m=+183.556221406" Dec 04 09:41:25 crc kubenswrapper[4707]: I1204 09:41:25.078204 4707 generic.go:334] "Generic (PLEG): container finished" podID="307adffc-65cf-4d47-867f-a96c2970a7d8" containerID="fe0498e9d4fd1623a892c55cf926c41d8aeb1b5db87ad206bd6ee9b3a647e34a" exitCode=0 Dec 04 09:41:25 crc kubenswrapper[4707]: I1204 09:41:25.078264 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"307adffc-65cf-4d47-867f-a96c2970a7d8","Type":"ContainerDied","Data":"fe0498e9d4fd1623a892c55cf926c41d8aeb1b5db87ad206bd6ee9b3a647e34a"} Dec 04 09:41:25 crc kubenswrapper[4707]: I1204 09:41:25.082737 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnkkq" event={"ID":"579805ef-aec3-4ea7-b5af-bdf514c7eb1f","Type":"ContainerStarted","Data":"df5fd45a3d9866f3669d1daada9ca9f1cceb0346d51b896f77002e671efaf8a0"} Dec 04 09:41:25 crc kubenswrapper[4707]: I1204 09:41:25.119594 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wnkkq" podStartSLOduration=3.443720393 podStartE2EDuration="36.119569434s" podCreationTimestamp="2025-12-04 09:40:49 +0000 UTC" firstStartedPulling="2025-12-04 09:40:51.811364841 +0000 UTC m=+151.247187348" lastFinishedPulling="2025-12-04 09:41:24.487213872 +0000 UTC m=+183.923036389" observedRunningTime="2025-12-04 09:41:25.115508331 +0000 UTC m=+184.551330848" watchObservedRunningTime="2025-12-04 09:41:25.119569434 +0000 UTC m=+184.555391981" Dec 04 09:41:26 crc kubenswrapper[4707]: I1204 09:41:26.089611 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5t5hm" event={"ID":"9bca5523-13da-4a24-8436-3780fe794fd4","Type":"ContainerStarted","Data":"c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3"} Dec 04 09:41:26 crc kubenswrapper[4707]: I1204 09:41:26.112230 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5t5hm" podStartSLOduration=3.922659704 podStartE2EDuration="36.112211035s" podCreationTimestamp="2025-12-04 09:40:50 +0000 UTC" firstStartedPulling="2025-12-04 09:40:52.818666772 +0000 UTC m=+152.254489279" lastFinishedPulling="2025-12-04 09:41:25.008218103 +0000 UTC m=+184.444040610" observedRunningTime="2025-12-04 09:41:26.109172636 +0000 UTC m=+185.544995183" watchObservedRunningTime="2025-12-04 09:41:26.112211035 +0000 UTC m=+185.548033542" Dec 04 09:41:26 crc kubenswrapper[4707]: I1204 09:41:26.460234 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 09:41:26 crc kubenswrapper[4707]: I1204 09:41:26.566477 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/307adffc-65cf-4d47-867f-a96c2970a7d8-kube-api-access\") pod \"307adffc-65cf-4d47-867f-a96c2970a7d8\" (UID: \"307adffc-65cf-4d47-867f-a96c2970a7d8\") " Dec 04 09:41:26 crc kubenswrapper[4707]: I1204 09:41:26.567420 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/307adffc-65cf-4d47-867f-a96c2970a7d8-kubelet-dir\") pod \"307adffc-65cf-4d47-867f-a96c2970a7d8\" (UID: \"307adffc-65cf-4d47-867f-a96c2970a7d8\") " Dec 04 09:41:26 crc kubenswrapper[4707]: I1204 09:41:26.567549 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/307adffc-65cf-4d47-867f-a96c2970a7d8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "307adffc-65cf-4d47-867f-a96c2970a7d8" (UID: "307adffc-65cf-4d47-867f-a96c2970a7d8"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:41:26 crc kubenswrapper[4707]: I1204 09:41:26.567765 4707 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/307adffc-65cf-4d47-867f-a96c2970a7d8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:26 crc kubenswrapper[4707]: I1204 09:41:26.571942 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/307adffc-65cf-4d47-867f-a96c2970a7d8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "307adffc-65cf-4d47-867f-a96c2970a7d8" (UID: "307adffc-65cf-4d47-867f-a96c2970a7d8"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:41:26 crc kubenswrapper[4707]: I1204 09:41:26.668780 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/307adffc-65cf-4d47-867f-a96c2970a7d8-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:26 crc kubenswrapper[4707]: I1204 09:41:26.979357 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 09:41:27 crc kubenswrapper[4707]: I1204 09:41:27.096037 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"307adffc-65cf-4d47-867f-a96c2970a7d8","Type":"ContainerDied","Data":"2375b2416f5596f8cea46f7c715953fa3b8a5e870f9ed5bfd2debfce55aa5d06"} Dec 04 09:41:27 crc kubenswrapper[4707]: I1204 09:41:27.096093 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2375b2416f5596f8cea46f7c715953fa3b8a5e870f9ed5bfd2debfce55aa5d06" Dec 04 09:41:27 crc kubenswrapper[4707]: I1204 09:41:27.096049 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 09:41:29 crc kubenswrapper[4707]: I1204 09:41:29.238379 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:41:29 crc kubenswrapper[4707]: I1204 09:41:29.238888 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:41:29 crc kubenswrapper[4707]: I1204 09:41:29.313907 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:41:29 crc kubenswrapper[4707]: I1204 09:41:29.948292 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 04 09:41:29 crc kubenswrapper[4707]: E1204 09:41:29.948519 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="307adffc-65cf-4d47-867f-a96c2970a7d8" containerName="pruner" Dec 04 09:41:29 crc kubenswrapper[4707]: I1204 09:41:29.948532 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="307adffc-65cf-4d47-867f-a96c2970a7d8" containerName="pruner" Dec 04 09:41:29 crc kubenswrapper[4707]: I1204 09:41:29.948627 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="307adffc-65cf-4d47-867f-a96c2970a7d8" containerName="pruner" Dec 04 09:41:29 crc kubenswrapper[4707]: I1204 09:41:29.948967 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:41:29 crc kubenswrapper[4707]: I1204 09:41:29.951366 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 04 09:41:29 crc kubenswrapper[4707]: I1204 09:41:29.951685 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 04 09:41:29 crc kubenswrapper[4707]: I1204 09:41:29.961842 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.032057 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.032404 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.044441 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-kubelet-dir\") pod \"installer-9-crc\" (UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.044605 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c38ed60f-f162-4163-8502-17b0d8369f08-kube-api-access\") pod \"installer-9-crc\" (UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.044650 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-var-lock\") pod \"installer-9-crc\" 
(UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.146203 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c38ed60f-f162-4163-8502-17b0d8369f08-kube-api-access\") pod \"installer-9-crc\" (UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.146303 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-var-lock\") pod \"installer-9-crc\" (UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.146411 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-var-lock\") pod \"installer-9-crc\" (UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.146490 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-kubelet-dir\") pod \"installer-9-crc\" (UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.146727 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-kubelet-dir\") pod \"installer-9-crc\" (UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.153089 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.173358 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c38ed60f-f162-4163-8502-17b0d8369f08-kube-api-access\") pod \"installer-9-crc\" (UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.283863 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.410472 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.411854 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.455009 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.698956 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 04 09:41:30 crc kubenswrapper[4707]: W1204 09:41:30.709585 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podc38ed60f_f162_4163_8502_17b0d8369f08.slice/crio-8438461b7a7ac7e92f7d110b1e7936b4803cb610b439d793dbe077a01d937ed6 WatchSource:0}: Error finding container 8438461b7a7ac7e92f7d110b1e7936b4803cb610b439d793dbe077a01d937ed6: Status 404 returned error can't find the container with id 8438461b7a7ac7e92f7d110b1e7936b4803cb610b439d793dbe077a01d937ed6 Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.816956 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:41:30 crc kubenswrapper[4707]: I1204 09:41:30.817019 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:41:31 crc kubenswrapper[4707]: I1204 09:41:31.073744 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wnkkq" podUID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerName="registry-server" probeResult="failure" output=< Dec 04 09:41:31 crc kubenswrapper[4707]: timeout: failed to connect service ":50051" within 1s Dec 04 09:41:31 crc kubenswrapper[4707]: > Dec 04 09:41:31 crc kubenswrapper[4707]: I1204 09:41:31.116581 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"c38ed60f-f162-4163-8502-17b0d8369f08","Type":"ContainerStarted","Data":"8438461b7a7ac7e92f7d110b1e7936b4803cb610b439d793dbe077a01d937ed6"} Dec 04 09:41:31 crc kubenswrapper[4707]: I1204 09:41:31.484966 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrkjm"] Dec 04 09:41:32 crc kubenswrapper[4707]: I1204 09:41:32.121201 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vrkjm" podUID="4e905028-4918-4514-a35a-74f753b746ab" containerName="registry-server" containerID="cri-o://cca69eacc00aa82b06b87f244cde54a9a17c6675562f50ef21790f33f27ae992" gracePeriod=2 Dec 04 09:41:32 crc kubenswrapper[4707]: I1204 09:41:32.157488 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/redhat-operators-5t5hm" podUID="9bca5523-13da-4a24-8436-3780fe794fd4" 
containerName="registry-server" probeResult="failure" output=< Dec 04 09:41:32 crc kubenswrapper[4707]: timeout: failed to connect service ":50051" within 1s Dec 04 09:41:32 crc kubenswrapper[4707]: > Dec 04 09:41:34 crc kubenswrapper[4707]: I1204 09:41:34.134267 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"c38ed60f-f162-4163-8502-17b0d8369f08","Type":"ContainerStarted","Data":"a6ea5d99d8ef202f4d606a6710adebf026ca3ee3194f250e039db5e1f17ac859"} Dec 04 09:41:34 crc kubenswrapper[4707]: I1204 09:41:34.140210 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrkjm" event={"ID":"4e905028-4918-4514-a35a-74f753b746ab","Type":"ContainerDied","Data":"cca69eacc00aa82b06b87f244cde54a9a17c6675562f50ef21790f33f27ae992"} Dec 04 09:41:34 crc kubenswrapper[4707]: I1204 09:41:34.140169 4707 generic.go:334] "Generic (PLEG): container finished" podID="4e905028-4918-4514-a35a-74f753b746ab" containerID="cca69eacc00aa82b06b87f244cde54a9a17c6675562f50ef21790f33f27ae992" exitCode=0 Dec 04 09:41:34 crc kubenswrapper[4707]: I1204 09:41:34.151665 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=5.151642127 podStartE2EDuration="5.151642127s" podCreationTimestamp="2025-12-04 09:41:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:41:34.150586312 +0000 UTC m=+193.586408839" watchObservedRunningTime="2025-12-04 09:41:34.151642127 +0000 UTC m=+193.587464634" Dec 04 09:41:37 crc kubenswrapper[4707]: I1204 09:41:37.690372 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:41:37 crc kubenswrapper[4707]: I1204 09:41:37.764410 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-catalog-content\") pod \"4e905028-4918-4514-a35a-74f753b746ab\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " Dec 04 09:41:37 crc kubenswrapper[4707]: I1204 09:41:37.764799 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-utilities\") pod \"4e905028-4918-4514-a35a-74f753b746ab\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " Dec 04 09:41:37 crc kubenswrapper[4707]: I1204 09:41:37.764936 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrnpf\" (UniqueName: \"kubernetes.io/projected/4e905028-4918-4514-a35a-74f753b746ab-kube-api-access-vrnpf\") pod \"4e905028-4918-4514-a35a-74f753b746ab\" (UID: \"4e905028-4918-4514-a35a-74f753b746ab\") " Dec 04 09:41:37 crc kubenswrapper[4707]: I1204 09:41:37.765748 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-utilities" (OuterVolumeSpecName: "utilities") pod "4e905028-4918-4514-a35a-74f753b746ab" (UID: "4e905028-4918-4514-a35a-74f753b746ab"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:41:37 crc kubenswrapper[4707]: I1204 09:41:37.770930 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e905028-4918-4514-a35a-74f753b746ab-kube-api-access-vrnpf" (OuterVolumeSpecName: "kube-api-access-vrnpf") pod "4e905028-4918-4514-a35a-74f753b746ab" (UID: "4e905028-4918-4514-a35a-74f753b746ab"). InnerVolumeSpecName "kube-api-access-vrnpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:41:37 crc kubenswrapper[4707]: I1204 09:41:37.785054 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4e905028-4918-4514-a35a-74f753b746ab" (UID: "4e905028-4918-4514-a35a-74f753b746ab"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:41:37 crc kubenswrapper[4707]: I1204 09:41:37.865697 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:37 crc kubenswrapper[4707]: I1204 09:41:37.865958 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e905028-4918-4514-a35a-74f753b746ab-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:37 crc kubenswrapper[4707]: I1204 09:41:37.866032 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrnpf\" (UniqueName: \"kubernetes.io/projected/4e905028-4918-4514-a35a-74f753b746ab-kube-api-access-vrnpf\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:38 crc kubenswrapper[4707]: I1204 09:41:38.166941 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrkjm" event={"ID":"4e905028-4918-4514-a35a-74f753b746ab","Type":"ContainerDied","Data":"a77c8ebd9220c34c37215d40c1592621a3cfa42974349100457794a876a782f8"} Dec 04 09:41:38 crc kubenswrapper[4707]: I1204 09:41:38.167003 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrkjm" Dec 04 09:41:38 crc kubenswrapper[4707]: I1204 09:41:38.167012 4707 scope.go:117] "RemoveContainer" containerID="cca69eacc00aa82b06b87f244cde54a9a17c6675562f50ef21790f33f27ae992" Dec 04 09:41:38 crc kubenswrapper[4707]: I1204 09:41:38.197961 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrkjm"] Dec 04 09:41:38 crc kubenswrapper[4707]: I1204 09:41:38.201348 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrkjm"] Dec 04 09:41:38 crc kubenswrapper[4707]: I1204 09:41:38.851714 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e905028-4918-4514-a35a-74f753b746ab" path="/var/lib/kubelet/pods/4e905028-4918-4514-a35a-74f753b746ab/volumes" Dec 04 09:41:40 crc kubenswrapper[4707]: I1204 09:41:40.073466 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:41:40 crc kubenswrapper[4707]: I1204 09:41:40.117939 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:41:40 crc kubenswrapper[4707]: I1204 09:41:40.453829 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:41:40 crc kubenswrapper[4707]: I1204 09:41:40.575711 4707 scope.go:117] "RemoveContainer" containerID="4f84f67f7d557311a8aeff6de60aef5816546a29e3321632dcb293933d453636" Dec 04 09:41:40 crc kubenswrapper[4707]: I1204 09:41:40.602579 4707 scope.go:117] "RemoveContainer" containerID="904c2ea2582cec75413d57d6ddc0a39b85f526c0277c35f36f2f1a5e95d19f49" Dec 04 09:41:41 crc kubenswrapper[4707]: I1204 09:41:41.679495 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5t5hm"] Dec 04 09:41:41 crc kubenswrapper[4707]: I1204 09:41:41.680080 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5t5hm" podUID="9bca5523-13da-4a24-8436-3780fe794fd4" containerName="registry-server" containerID="cri-o://c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3" gracePeriod=2 Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.076407 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.218876 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fwb9\" (UniqueName: \"kubernetes.io/projected/9bca5523-13da-4a24-8436-3780fe794fd4-kube-api-access-9fwb9\") pod \"9bca5523-13da-4a24-8436-3780fe794fd4\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.219031 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-utilities\") pod \"9bca5523-13da-4a24-8436-3780fe794fd4\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.219090 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-catalog-content\") pod \"9bca5523-13da-4a24-8436-3780fe794fd4\" (UID: \"9bca5523-13da-4a24-8436-3780fe794fd4\") " Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.222583 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-utilities" (OuterVolumeSpecName: "utilities") pod "9bca5523-13da-4a24-8436-3780fe794fd4" (UID: "9bca5523-13da-4a24-8436-3780fe794fd4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.238218 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bca5523-13da-4a24-8436-3780fe794fd4-kube-api-access-9fwb9" (OuterVolumeSpecName: "kube-api-access-9fwb9") pod "9bca5523-13da-4a24-8436-3780fe794fd4" (UID: "9bca5523-13da-4a24-8436-3780fe794fd4"). InnerVolumeSpecName "kube-api-access-9fwb9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.239516 4707 generic.go:334] "Generic (PLEG): container finished" podID="48677f6e-8c16-480b-aad8-d87ffe093fca" containerID="876d208b78492916a0b0a9a3d98f10ced70ca89dd23f96670887491b2954d22b" exitCode=0 Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.239619 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cd4dt" event={"ID":"48677f6e-8c16-480b-aad8-d87ffe093fca","Type":"ContainerDied","Data":"876d208b78492916a0b0a9a3d98f10ced70ca89dd23f96670887491b2954d22b"} Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.249351 4707 generic.go:334] "Generic (PLEG): container finished" podID="9bca5523-13da-4a24-8436-3780fe794fd4" containerID="c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3" exitCode=0 Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.249437 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5t5hm" event={"ID":"9bca5523-13da-4a24-8436-3780fe794fd4","Type":"ContainerDied","Data":"c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3"} Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.249466 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5t5hm" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.249483 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5t5hm" event={"ID":"9bca5523-13da-4a24-8436-3780fe794fd4","Type":"ContainerDied","Data":"813048458fb6a78807fc3f14e4214a0c6ff28c3b391f7ab24b6717a8d369997c"} Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.249504 4707 scope.go:117] "RemoveContainer" containerID="c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.262898 4707 generic.go:334] "Generic (PLEG): container finished" podID="dee25bc0-3766-43d6-8dde-8d316c48bd04" containerID="24716a6a57f76f1ad09c8ab49ff658f478a0cf7e4d43c4aae360235bb2697abe" exitCode=0 Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.262972 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57t67" event={"ID":"dee25bc0-3766-43d6-8dde-8d316c48bd04","Type":"ContainerDied","Data":"24716a6a57f76f1ad09c8ab49ff658f478a0cf7e4d43c4aae360235bb2697abe"} Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.265686 4707 generic.go:334] "Generic (PLEG): container finished" podID="aad5050f-90b9-4364-9dc7-c32892d674d0" containerID="be070ee7f9cb7d1d36021357c5e5d096dcb96d28f29636f7de6a42e44ed9bfaf" exitCode=0 Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.265778 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcf45" event={"ID":"aad5050f-90b9-4364-9dc7-c32892d674d0","Type":"ContainerDied","Data":"be070ee7f9cb7d1d36021357c5e5d096dcb96d28f29636f7de6a42e44ed9bfaf"} Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.270320 4707 generic.go:334] "Generic (PLEG): container finished" podID="9ad7168e-fdaa-4830-b423-e981f1640f15" containerID="c08fcf6af4a4dedbcb342a46ef1efba311ce73a66c97814da572d9a1d3fd3a2d" exitCode=0 Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.270392 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmkrh" event={"ID":"9ad7168e-fdaa-4830-b423-e981f1640f15","Type":"ContainerDied","Data":"c08fcf6af4a4dedbcb342a46ef1efba311ce73a66c97814da572d9a1d3fd3a2d"} Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.284646 4707 scope.go:117] "RemoveContainer" containerID="02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.284859 4707 generic.go:334] "Generic (PLEG): container finished" podID="2ffc2219-3702-4f09-9511-145919595de9" containerID="5a196f573fe2a853e1fe68a57e6c5def7c110fc93a805fce66c61316223c7a8f" exitCode=0 Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.284907 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrqx7" event={"ID":"2ffc2219-3702-4f09-9511-145919595de9","Type":"ContainerDied","Data":"5a196f573fe2a853e1fe68a57e6c5def7c110fc93a805fce66c61316223c7a8f"} Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.315685 4707 scope.go:117] "RemoveContainer" containerID="c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.320399 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 
09:41:42.320455 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fwb9\" (UniqueName: \"kubernetes.io/projected/9bca5523-13da-4a24-8436-3780fe794fd4-kube-api-access-9fwb9\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.339889 4707 scope.go:117] "RemoveContainer" containerID="c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3" Dec 04 09:41:42 crc kubenswrapper[4707]: E1204 09:41:42.342420 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3\": container with ID starting with c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3 not found: ID does not exist" containerID="c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.342461 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3"} err="failed to get container status \"c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3\": rpc error: code = NotFound desc = could not find container \"c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3\": container with ID starting with c85ec74a5aa63af1dec1e13b31e32aff05da38f7efb4396a58619365919d65e3 not found: ID does not exist" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.342504 4707 scope.go:117] "RemoveContainer" containerID="02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503" Dec 04 09:41:42 crc kubenswrapper[4707]: E1204 09:41:42.344871 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503\": container with ID starting with 02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503 not found: ID does not exist" containerID="02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.344924 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503"} err="failed to get container status \"02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503\": rpc error: code = NotFound desc = could not find container \"02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503\": container with ID starting with 02f5c9ea5cc9cb2bb96c6ecd6b081f62d3a985f2745c84e6044b28dc12edd503 not found: ID does not exist" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.344963 4707 scope.go:117] "RemoveContainer" containerID="c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8" Dec 04 09:41:42 crc kubenswrapper[4707]: E1204 09:41:42.345295 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8\": container with ID starting with c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8 not found: ID does not exist" containerID="c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.345354 4707 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8"} err="failed to get container status \"c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8\": rpc error: code = NotFound desc = could not find container \"c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8\": container with ID starting with c4f5126591fee1291ae492da38cb83864f93efa4a244cec9c34b8d1e543e06f8 not found: ID does not exist" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.385109 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9bca5523-13da-4a24-8436-3780fe794fd4" (UID: "9bca5523-13da-4a24-8436-3780fe794fd4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.422507 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bca5523-13da-4a24-8436-3780fe794fd4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.585627 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5t5hm"] Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.588621 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5t5hm"] Dec 04 09:41:42 crc kubenswrapper[4707]: I1204 09:41:42.852290 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bca5523-13da-4a24-8436-3780fe794fd4" path="/var/lib/kubelet/pods/9bca5523-13da-4a24-8436-3780fe794fd4/volumes" Dec 04 09:41:43 crc kubenswrapper[4707]: I1204 09:41:43.292964 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrqx7" event={"ID":"2ffc2219-3702-4f09-9511-145919595de9","Type":"ContainerStarted","Data":"b31ff96d13f1b2d5ae54edc65697d5360d6d3ba6d2a5293f0b447f1a972dd21f"} Dec 04 09:41:43 crc kubenswrapper[4707]: I1204 09:41:43.295320 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cd4dt" event={"ID":"48677f6e-8c16-480b-aad8-d87ffe093fca","Type":"ContainerStarted","Data":"64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1"} Dec 04 09:41:43 crc kubenswrapper[4707]: I1204 09:41:43.299396 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57t67" event={"ID":"dee25bc0-3766-43d6-8dde-8d316c48bd04","Type":"ContainerStarted","Data":"fba7bb099b62618e0a229b57697bac350adf20523b22e5cfd7106887383a966f"} Dec 04 09:41:43 crc kubenswrapper[4707]: I1204 09:41:43.304453 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcf45" event={"ID":"aad5050f-90b9-4364-9dc7-c32892d674d0","Type":"ContainerStarted","Data":"6277faef785e7ec6a2aa2e75ad9ba784613777409692d91f378bbbbb51e5c4f7"} Dec 04 09:41:43 crc kubenswrapper[4707]: I1204 09:41:43.307796 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmkrh" event={"ID":"9ad7168e-fdaa-4830-b423-e981f1640f15","Type":"ContainerStarted","Data":"0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20"} Dec 04 09:41:43 crc kubenswrapper[4707]: I1204 09:41:43.319834 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-qrqx7" podStartSLOduration=3.034867838 podStartE2EDuration="57.319813947s" podCreationTimestamp="2025-12-04 09:40:46 +0000 UTC" firstStartedPulling="2025-12-04 09:40:48.593593555 +0000 UTC m=+148.029416062" lastFinishedPulling="2025-12-04 09:41:42.878539664 +0000 UTC m=+202.314362171" observedRunningTime="2025-12-04 09:41:43.315938844 +0000 UTC m=+202.751761351" watchObservedRunningTime="2025-12-04 09:41:43.319813947 +0000 UTC m=+202.755636464" Dec 04 09:41:43 crc kubenswrapper[4707]: I1204 09:41:43.343082 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gcf45" podStartSLOduration=2.067578241 podStartE2EDuration="57.343059331s" podCreationTimestamp="2025-12-04 09:40:46 +0000 UTC" firstStartedPulling="2025-12-04 09:40:47.555435498 +0000 UTC m=+146.991258005" lastFinishedPulling="2025-12-04 09:41:42.830916588 +0000 UTC m=+202.266739095" observedRunningTime="2025-12-04 09:41:43.33921527 +0000 UTC m=+202.775037777" watchObservedRunningTime="2025-12-04 09:41:43.343059331 +0000 UTC m=+202.778881838" Dec 04 09:41:43 crc kubenswrapper[4707]: I1204 09:41:43.361942 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cd4dt" podStartSLOduration=3.203514791 podStartE2EDuration="57.361921338s" podCreationTimestamp="2025-12-04 09:40:46 +0000 UTC" firstStartedPulling="2025-12-04 09:40:48.606140723 +0000 UTC m=+148.041963230" lastFinishedPulling="2025-12-04 09:41:42.76454728 +0000 UTC m=+202.200369777" observedRunningTime="2025-12-04 09:41:43.357607851 +0000 UTC m=+202.793430368" watchObservedRunningTime="2025-12-04 09:41:43.361921338 +0000 UTC m=+202.797743855" Dec 04 09:41:43 crc kubenswrapper[4707]: I1204 09:41:43.377222 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qmkrh" podStartSLOduration=2.292205351 podStartE2EDuration="56.377203201s" podCreationTimestamp="2025-12-04 09:40:47 +0000 UTC" firstStartedPulling="2025-12-04 09:40:48.651908246 +0000 UTC m=+148.087730753" lastFinishedPulling="2025-12-04 09:41:42.736906056 +0000 UTC m=+202.172728603" observedRunningTime="2025-12-04 09:41:43.376265541 +0000 UTC m=+202.812088068" watchObservedRunningTime="2025-12-04 09:41:43.377203201 +0000 UTC m=+202.813025708" Dec 04 09:41:43 crc kubenswrapper[4707]: I1204 09:41:43.400630 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-57t67" podStartSLOduration=2.329121612 podStartE2EDuration="55.400606681s" podCreationTimestamp="2025-12-04 09:40:48 +0000 UTC" firstStartedPulling="2025-12-04 09:40:49.714063064 +0000 UTC m=+149.149885571" lastFinishedPulling="2025-12-04 09:41:42.785548133 +0000 UTC m=+202.221370640" observedRunningTime="2025-12-04 09:41:43.397566335 +0000 UTC m=+202.833388852" watchObservedRunningTime="2025-12-04 09:41:43.400606681 +0000 UTC m=+202.836429208" Dec 04 09:41:46 crc kubenswrapper[4707]: I1204 09:41:46.813752 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:41:46 crc kubenswrapper[4707]: I1204 09:41:46.814405 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:41:46 crc kubenswrapper[4707]: I1204 09:41:46.859897 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:41:47 crc kubenswrapper[4707]: I1204 09:41:47.078721 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:41:47 crc kubenswrapper[4707]: I1204 09:41:47.078884 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:41:47 crc kubenswrapper[4707]: I1204 09:41:47.120157 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:41:47 crc kubenswrapper[4707]: I1204 09:41:47.219830 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:41:47 crc kubenswrapper[4707]: I1204 09:41:47.220061 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:41:47 crc kubenswrapper[4707]: I1204 09:41:47.278911 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:41:47 crc kubenswrapper[4707]: I1204 09:41:47.489724 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:41:47 crc kubenswrapper[4707]: I1204 09:41:47.489789 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:41:47 crc kubenswrapper[4707]: I1204 09:41:47.537642 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:41:48 crc kubenswrapper[4707]: I1204 09:41:48.400290 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:41:48 crc kubenswrapper[4707]: I1204 09:41:48.405486 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:41:48 crc kubenswrapper[4707]: I1204 09:41:48.417556 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:41:48 crc kubenswrapper[4707]: I1204 09:41:48.805206 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:41:48 crc kubenswrapper[4707]: I1204 09:41:48.806521 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:41:48 crc kubenswrapper[4707]: I1204 09:41:48.851862 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:41:49 crc kubenswrapper[4707]: I1204 09:41:49.380799 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:41:50 crc kubenswrapper[4707]: I1204 09:41:50.478247 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qmkrh"] Dec 04 09:41:50 crc kubenswrapper[4707]: I1204 09:41:50.478667 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qmkrh" podUID="9ad7168e-fdaa-4830-b423-e981f1640f15" containerName="registry-server" 
containerID="cri-o://0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20" gracePeriod=2 Dec 04 09:41:51 crc kubenswrapper[4707]: I1204 09:41:51.480981 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cd4dt"] Dec 04 09:41:51 crc kubenswrapper[4707]: I1204 09:41:51.481620 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cd4dt" podUID="48677f6e-8c16-480b-aad8-d87ffe093fca" containerName="registry-server" containerID="cri-o://64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1" gracePeriod=2 Dec 04 09:41:51 crc kubenswrapper[4707]: I1204 09:41:51.892773 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:41:51 crc kubenswrapper[4707]: I1204 09:41:51.975937 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.054170 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-utilities\") pod \"48677f6e-8c16-480b-aad8-d87ffe093fca\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.054315 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcvwf\" (UniqueName: \"kubernetes.io/projected/48677f6e-8c16-480b-aad8-d87ffe093fca-kube-api-access-vcvwf\") pod \"48677f6e-8c16-480b-aad8-d87ffe093fca\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.054371 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-catalog-content\") pod \"48677f6e-8c16-480b-aad8-d87ffe093fca\" (UID: \"48677f6e-8c16-480b-aad8-d87ffe093fca\") " Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.055015 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-utilities" (OuterVolumeSpecName: "utilities") pod "48677f6e-8c16-480b-aad8-d87ffe093fca" (UID: "48677f6e-8c16-480b-aad8-d87ffe093fca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.060907 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48677f6e-8c16-480b-aad8-d87ffe093fca-kube-api-access-vcvwf" (OuterVolumeSpecName: "kube-api-access-vcvwf") pod "48677f6e-8c16-480b-aad8-d87ffe093fca" (UID: "48677f6e-8c16-480b-aad8-d87ffe093fca"). InnerVolumeSpecName "kube-api-access-vcvwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.107859 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48677f6e-8c16-480b-aad8-d87ffe093fca" (UID: "48677f6e-8c16-480b-aad8-d87ffe093fca"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.155724 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-utilities\") pod \"9ad7168e-fdaa-4830-b423-e981f1640f15\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.155809 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-catalog-content\") pod \"9ad7168e-fdaa-4830-b423-e981f1640f15\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.155947 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvljf\" (UniqueName: \"kubernetes.io/projected/9ad7168e-fdaa-4830-b423-e981f1640f15-kube-api-access-zvljf\") pod \"9ad7168e-fdaa-4830-b423-e981f1640f15\" (UID: \"9ad7168e-fdaa-4830-b423-e981f1640f15\") " Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.156493 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcvwf\" (UniqueName: \"kubernetes.io/projected/48677f6e-8c16-480b-aad8-d87ffe093fca-kube-api-access-vcvwf\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.156529 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.156516 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-utilities" (OuterVolumeSpecName: "utilities") pod "9ad7168e-fdaa-4830-b423-e981f1640f15" (UID: "9ad7168e-fdaa-4830-b423-e981f1640f15"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.156543 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48677f6e-8c16-480b-aad8-d87ffe093fca-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.158538 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ad7168e-fdaa-4830-b423-e981f1640f15-kube-api-access-zvljf" (OuterVolumeSpecName: "kube-api-access-zvljf") pod "9ad7168e-fdaa-4830-b423-e981f1640f15" (UID: "9ad7168e-fdaa-4830-b423-e981f1640f15"). InnerVolumeSpecName "kube-api-access-zvljf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.215652 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ad7168e-fdaa-4830-b423-e981f1640f15" (UID: "9ad7168e-fdaa-4830-b423-e981f1640f15"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.258228 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.258265 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ad7168e-fdaa-4830-b423-e981f1640f15-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.258277 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvljf\" (UniqueName: \"kubernetes.io/projected/9ad7168e-fdaa-4830-b423-e981f1640f15-kube-api-access-zvljf\") on node \"crc\" DevicePath \"\"" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.356806 4707 generic.go:334] "Generic (PLEG): container finished" podID="48677f6e-8c16-480b-aad8-d87ffe093fca" containerID="64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1" exitCode=0 Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.356864 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cd4dt" event={"ID":"48677f6e-8c16-480b-aad8-d87ffe093fca","Type":"ContainerDied","Data":"64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1"} Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.356891 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cd4dt" event={"ID":"48677f6e-8c16-480b-aad8-d87ffe093fca","Type":"ContainerDied","Data":"99ed92869884146e5a1025d2c434183f2571dd9641d5e7147704599273637e88"} Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.356907 4707 scope.go:117] "RemoveContainer" containerID="64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.357025 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cd4dt" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.360572 4707 generic.go:334] "Generic (PLEG): container finished" podID="9ad7168e-fdaa-4830-b423-e981f1640f15" containerID="0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20" exitCode=0 Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.360612 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmkrh" event={"ID":"9ad7168e-fdaa-4830-b423-e981f1640f15","Type":"ContainerDied","Data":"0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20"} Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.360640 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qmkrh" event={"ID":"9ad7168e-fdaa-4830-b423-e981f1640f15","Type":"ContainerDied","Data":"7a8d539cf644523ac18b5756478ad23f7df6a9aaecfc5554c3050dbf21b2c874"} Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.360745 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qmkrh" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.385150 4707 scope.go:117] "RemoveContainer" containerID="876d208b78492916a0b0a9a3d98f10ced70ca89dd23f96670887491b2954d22b" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.390841 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cd4dt"] Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.394574 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cd4dt"] Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.406318 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qmkrh"] Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.409693 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qmkrh"] Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.423546 4707 scope.go:117] "RemoveContainer" containerID="8f2bf6398dd1c674a546fe3cd1f89f838cea8da10c0a9e1063765824451ebe46" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.438632 4707 scope.go:117] "RemoveContainer" containerID="64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1" Dec 04 09:41:52 crc kubenswrapper[4707]: E1204 09:41:52.439188 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1\": container with ID starting with 64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1 not found: ID does not exist" containerID="64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.439267 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1"} err="failed to get container status \"64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1\": rpc error: code = NotFound desc = could not find container \"64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1\": container with ID starting with 64c1588e2061af560a03f6ba7e4d42efc190ebe38663b1a84d9c71e722e8d5c1 not found: ID does not exist" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.439318 4707 scope.go:117] "RemoveContainer" containerID="876d208b78492916a0b0a9a3d98f10ced70ca89dd23f96670887491b2954d22b" Dec 04 09:41:52 crc kubenswrapper[4707]: E1204 09:41:52.439769 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"876d208b78492916a0b0a9a3d98f10ced70ca89dd23f96670887491b2954d22b\": container with ID starting with 876d208b78492916a0b0a9a3d98f10ced70ca89dd23f96670887491b2954d22b not found: ID does not exist" containerID="876d208b78492916a0b0a9a3d98f10ced70ca89dd23f96670887491b2954d22b" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.439829 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"876d208b78492916a0b0a9a3d98f10ced70ca89dd23f96670887491b2954d22b"} err="failed to get container status \"876d208b78492916a0b0a9a3d98f10ced70ca89dd23f96670887491b2954d22b\": rpc error: code = NotFound desc = could not find container \"876d208b78492916a0b0a9a3d98f10ced70ca89dd23f96670887491b2954d22b\": container with ID starting with 
876d208b78492916a0b0a9a3d98f10ced70ca89dd23f96670887491b2954d22b not found: ID does not exist" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.439877 4707 scope.go:117] "RemoveContainer" containerID="8f2bf6398dd1c674a546fe3cd1f89f838cea8da10c0a9e1063765824451ebe46" Dec 04 09:41:52 crc kubenswrapper[4707]: E1204 09:41:52.440172 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f2bf6398dd1c674a546fe3cd1f89f838cea8da10c0a9e1063765824451ebe46\": container with ID starting with 8f2bf6398dd1c674a546fe3cd1f89f838cea8da10c0a9e1063765824451ebe46 not found: ID does not exist" containerID="8f2bf6398dd1c674a546fe3cd1f89f838cea8da10c0a9e1063765824451ebe46" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.440198 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f2bf6398dd1c674a546fe3cd1f89f838cea8da10c0a9e1063765824451ebe46"} err="failed to get container status \"8f2bf6398dd1c674a546fe3cd1f89f838cea8da10c0a9e1063765824451ebe46\": rpc error: code = NotFound desc = could not find container \"8f2bf6398dd1c674a546fe3cd1f89f838cea8da10c0a9e1063765824451ebe46\": container with ID starting with 8f2bf6398dd1c674a546fe3cd1f89f838cea8da10c0a9e1063765824451ebe46 not found: ID does not exist" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.440211 4707 scope.go:117] "RemoveContainer" containerID="0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.452043 4707 scope.go:117] "RemoveContainer" containerID="c08fcf6af4a4dedbcb342a46ef1efba311ce73a66c97814da572d9a1d3fd3a2d" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.466721 4707 scope.go:117] "RemoveContainer" containerID="7b6f551b73eac7c0f462fb6104fb5fafa41f25d3f7b8c94cb4208b9b3afd56e4" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.479905 4707 scope.go:117] "RemoveContainer" containerID="0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20" Dec 04 09:41:52 crc kubenswrapper[4707]: E1204 09:41:52.480538 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20\": container with ID starting with 0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20 not found: ID does not exist" containerID="0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.480604 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20"} err="failed to get container status \"0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20\": rpc error: code = NotFound desc = could not find container \"0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20\": container with ID starting with 0073bd0aed7a7e95bb2a3ac2f10e48f908a0772c5f9f4f8b0a9eb772fbc7cf20 not found: ID does not exist" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.480649 4707 scope.go:117] "RemoveContainer" containerID="c08fcf6af4a4dedbcb342a46ef1efba311ce73a66c97814da572d9a1d3fd3a2d" Dec 04 09:41:52 crc kubenswrapper[4707]: E1204 09:41:52.481211 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c08fcf6af4a4dedbcb342a46ef1efba311ce73a66c97814da572d9a1d3fd3a2d\": container 
with ID starting with c08fcf6af4a4dedbcb342a46ef1efba311ce73a66c97814da572d9a1d3fd3a2d not found: ID does not exist" containerID="c08fcf6af4a4dedbcb342a46ef1efba311ce73a66c97814da572d9a1d3fd3a2d" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.481248 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c08fcf6af4a4dedbcb342a46ef1efba311ce73a66c97814da572d9a1d3fd3a2d"} err="failed to get container status \"c08fcf6af4a4dedbcb342a46ef1efba311ce73a66c97814da572d9a1d3fd3a2d\": rpc error: code = NotFound desc = could not find container \"c08fcf6af4a4dedbcb342a46ef1efba311ce73a66c97814da572d9a1d3fd3a2d\": container with ID starting with c08fcf6af4a4dedbcb342a46ef1efba311ce73a66c97814da572d9a1d3fd3a2d not found: ID does not exist" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.481273 4707 scope.go:117] "RemoveContainer" containerID="7b6f551b73eac7c0f462fb6104fb5fafa41f25d3f7b8c94cb4208b9b3afd56e4" Dec 04 09:41:52 crc kubenswrapper[4707]: E1204 09:41:52.481556 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b6f551b73eac7c0f462fb6104fb5fafa41f25d3f7b8c94cb4208b9b3afd56e4\": container with ID starting with 7b6f551b73eac7c0f462fb6104fb5fafa41f25d3f7b8c94cb4208b9b3afd56e4 not found: ID does not exist" containerID="7b6f551b73eac7c0f462fb6104fb5fafa41f25d3f7b8c94cb4208b9b3afd56e4" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.481586 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b6f551b73eac7c0f462fb6104fb5fafa41f25d3f7b8c94cb4208b9b3afd56e4"} err="failed to get container status \"7b6f551b73eac7c0f462fb6104fb5fafa41f25d3f7b8c94cb4208b9b3afd56e4\": rpc error: code = NotFound desc = could not find container \"7b6f551b73eac7c0f462fb6104fb5fafa41f25d3f7b8c94cb4208b9b3afd56e4\": container with ID starting with 7b6f551b73eac7c0f462fb6104fb5fafa41f25d3f7b8c94cb4208b9b3afd56e4 not found: ID does not exist" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.851455 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48677f6e-8c16-480b-aad8-d87ffe093fca" path="/var/lib/kubelet/pods/48677f6e-8c16-480b-aad8-d87ffe093fca/volumes" Dec 04 09:41:52 crc kubenswrapper[4707]: I1204 09:41:52.852409 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ad7168e-fdaa-4830-b423-e981f1640f15" path="/var/lib/kubelet/pods/9ad7168e-fdaa-4830-b423-e981f1640f15/volumes" Dec 04 09:41:56 crc kubenswrapper[4707]: I1204 09:41:56.859204 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:41:57 crc kubenswrapper[4707]: I1204 09:41:57.357941 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rttms"] Dec 04 09:42:00 crc kubenswrapper[4707]: I1204 09:42:00.817633 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:42:00 crc kubenswrapper[4707]: I1204 09:42:00.817973 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:42:00 crc kubenswrapper[4707]: I1204 09:42:00.818027 4707 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:42:00 crc kubenswrapper[4707]: I1204 09:42:00.818699 4707 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc"} pod="openshift-machine-config-operator/machine-config-daemon-c244z" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 09:42:00 crc kubenswrapper[4707]: I1204 09:42:00.818768 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" containerID="cri-o://d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc" gracePeriod=600 Dec 04 09:42:04 crc kubenswrapper[4707]: I1204 09:42:04.430746 4707 generic.go:334] "Generic (PLEG): container finished" podID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerID="d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc" exitCode=0 Dec 04 09:42:04 crc kubenswrapper[4707]: I1204 09:42:04.430826 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerDied","Data":"d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc"} Dec 04 09:42:05 crc kubenswrapper[4707]: I1204 09:42:05.440249 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"f92df9cc0b8fd804a06257feebcda7fbf11147429c710e85020f10339b40deac"} Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.853651 4707 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.854575 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2" gracePeriod=15 Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.854634 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276" gracePeriod=15 Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.854668 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684" gracePeriod=15 Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.854679 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13" gracePeriod=15 Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.854717 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d" gracePeriod=15 Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855054 4707 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855277 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ad7168e-fdaa-4830-b423-e981f1640f15" containerName="extract-utilities" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855290 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ad7168e-fdaa-4830-b423-e981f1640f15" containerName="extract-utilities" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855301 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bca5523-13da-4a24-8436-3780fe794fd4" containerName="extract-utilities" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855307 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bca5523-13da-4a24-8436-3780fe794fd4" containerName="extract-utilities" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855316 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855323 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855348 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855354 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855366 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855372 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855381 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ad7168e-fdaa-4830-b423-e981f1640f15" containerName="extract-content" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855387 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ad7168e-fdaa-4830-b423-e981f1640f15" containerName="extract-content" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855396 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ad7168e-fdaa-4830-b423-e981f1640f15" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855402 4707 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="9ad7168e-fdaa-4830-b423-e981f1640f15" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855410 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48677f6e-8c16-480b-aad8-d87ffe093fca" containerName="extract-utilities" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855415 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="48677f6e-8c16-480b-aad8-d87ffe093fca" containerName="extract-utilities" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855424 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48677f6e-8c16-480b-aad8-d87ffe093fca" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855429 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="48677f6e-8c16-480b-aad8-d87ffe093fca" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855438 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855443 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855451 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855458 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855467 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e905028-4918-4514-a35a-74f753b746ab" containerName="extract-content" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855474 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e905028-4918-4514-a35a-74f753b746ab" containerName="extract-content" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855482 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855488 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855495 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e905028-4918-4514-a35a-74f753b746ab" containerName="extract-utilities" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855501 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e905028-4918-4514-a35a-74f753b746ab" containerName="extract-utilities" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855510 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e905028-4918-4514-a35a-74f753b746ab" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855516 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e905028-4918-4514-a35a-74f753b746ab" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855525 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bca5523-13da-4a24-8436-3780fe794fd4" containerName="extract-content" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 
09:42:10.855532 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bca5523-13da-4a24-8436-3780fe794fd4" containerName="extract-content" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855538 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bca5523-13da-4a24-8436-3780fe794fd4" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855545 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bca5523-13da-4a24-8436-3780fe794fd4" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855552 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48677f6e-8c16-480b-aad8-d87ffe093fca" containerName="extract-content" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855557 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="48677f6e-8c16-480b-aad8-d87ffe093fca" containerName="extract-content" Dec 04 09:42:10 crc kubenswrapper[4707]: E1204 09:42:10.855565 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855570 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855657 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855667 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855674 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bca5523-13da-4a24-8436-3780fe794fd4" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855682 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="48677f6e-8c16-480b-aad8-d87ffe093fca" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855689 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855696 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855705 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e905028-4918-4514-a35a-74f753b746ab" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855713 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855721 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ad7168e-fdaa-4830-b423-e981f1640f15" containerName="registry-server" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.855728 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.856884 4707 kubelet.go:2421] "SyncLoop ADD" source="file" 
pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.857282 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.861233 4707 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 04 09:42:10 crc kubenswrapper[4707]: I1204 09:42:10.898903 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.003578 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.003835 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.003971 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.004087 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.004208 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.004479 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.004602 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.004736 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105627 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105697 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105726 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105752 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105774 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105805 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105811 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105847 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105869 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105895 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105902 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105943 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105958 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105995 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.106007 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.105996 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.201934 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:11 crc kubenswrapper[4707]: E1204 09:42:11.229989 4707 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.180:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187df9d718cfa331 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 09:42:11.228648241 +0000 UTC m=+230.664470748,LastTimestamp:2025-12-04 09:42:11.228648241 +0000 UTC m=+230.664470748,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.477701 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a"} Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.478156 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"348c8e07c242abfa8400466766ceb02dc17f34a90de3cad18b1ed34f874f930c"} Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.483470 4707 generic.go:334] "Generic (PLEG): container finished" podID="c38ed60f-f162-4163-8502-17b0d8369f08" containerID="a6ea5d99d8ef202f4d606a6710adebf026ca3ee3194f250e039db5e1f17ac859" exitCode=0 Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.483548 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"c38ed60f-f162-4163-8502-17b0d8369f08","Type":"ContainerDied","Data":"a6ea5d99d8ef202f4d606a6710adebf026ca3ee3194f250e039db5e1f17ac859"} Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.484364 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.484888 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.485721 4707 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.486707 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.487385 4707 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276" exitCode=0 Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.487404 4707 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d" exitCode=0 Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.487410 4707 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13" exitCode=0 Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.487416 4707 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684" exitCode=2 Dec 04 09:42:11 crc kubenswrapper[4707]: I1204 09:42:11.487448 4707 scope.go:117] "RemoveContainer" containerID="426eb0f35207e5a1ca3dd4ef432de6d2f391638ba958cc2a6fac9e66f9baf66d" Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.495076 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.496549 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.496758 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.757068 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.757921 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.758215 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.938535 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-var-lock\") pod \"c38ed60f-f162-4163-8502-17b0d8369f08\" (UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.938670 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c38ed60f-f162-4163-8502-17b0d8369f08-kube-api-access\") pod \"c38ed60f-f162-4163-8502-17b0d8369f08\" (UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.938715 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-kubelet-dir\") pod \"c38ed60f-f162-4163-8502-17b0d8369f08\" (UID: \"c38ed60f-f162-4163-8502-17b0d8369f08\") " Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.939110 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c38ed60f-f162-4163-8502-17b0d8369f08" (UID: "c38ed60f-f162-4163-8502-17b0d8369f08"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.939133 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-var-lock" (OuterVolumeSpecName: "var-lock") pod "c38ed60f-f162-4163-8502-17b0d8369f08" (UID: "c38ed60f-f162-4163-8502-17b0d8369f08"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:42:12 crc kubenswrapper[4707]: I1204 09:42:12.950959 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c38ed60f-f162-4163-8502-17b0d8369f08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c38ed60f-f162-4163-8502-17b0d8369f08" (UID: "c38ed60f-f162-4163-8502-17b0d8369f08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:42:13 crc kubenswrapper[4707]: I1204 09:42:13.040719 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c38ed60f-f162-4163-8502-17b0d8369f08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:13 crc kubenswrapper[4707]: I1204 09:42:13.042968 4707 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:13 crc kubenswrapper[4707]: I1204 09:42:13.042992 4707 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/c38ed60f-f162-4163-8502-17b0d8369f08-var-lock\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:13 crc kubenswrapper[4707]: I1204 09:42:13.503706 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"c38ed60f-f162-4163-8502-17b0d8369f08","Type":"ContainerDied","Data":"8438461b7a7ac7e92f7d110b1e7936b4803cb610b439d793dbe077a01d937ed6"} Dec 04 09:42:13 crc kubenswrapper[4707]: I1204 09:42:13.503746 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8438461b7a7ac7e92f7d110b1e7936b4803cb610b439d793dbe077a01d937ed6" Dec 04 09:42:13 crc kubenswrapper[4707]: I1204 09:42:13.503768 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 04 09:42:13 crc kubenswrapper[4707]: I1204 09:42:13.517048 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:13 crc kubenswrapper[4707]: I1204 09:42:13.517285 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.516886 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.517929 4707 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2" exitCode=0 Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.581141 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.582909 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.583564 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.583893 4707 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.584134 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.772250 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.772435 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.772484 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.772604 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.772699 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.772695 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.773009 4707 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.773047 4707 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.773071 4707 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:14 crc kubenswrapper[4707]: I1204 09:42:14.853551 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.526139 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.526964 4707 scope.go:117] "RemoveContainer" containerID="ddc88f48e40554c71636da16f045f897e1f18b5b5c7d57d1a2c51d8ad2574276" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.527130 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.528442 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.528802 4707 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.529222 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.530282 4707 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.530689 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.530894 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.543505 4707 scope.go:117] "RemoveContainer" containerID="bb5277d2ac823425eae82f00bd60c24c08f72354d3ec94633a2f1710416e756d" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.554933 4707 scope.go:117] "RemoveContainer" containerID="cb53083453f6f3c1f912c19ccb06ae82827b20a267ee237a4108d8775a357e13" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.571239 4707 scope.go:117] "RemoveContainer" containerID="efbdf103598a4612baec7096d0fae8c75aa025a42bc498993e62bb0c6c8c1684" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.583567 4707 scope.go:117] "RemoveContainer" containerID="517b9d6192e45c36ddd74f14b5bdd1f885ad10cb2774c93cf5b3bd24370257d2" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.598505 4707 scope.go:117] "RemoveContainer" containerID="f1a0db4b78fc775e13cfcad2bf3a6719d28343680dbbef06a2deb7acc24f5172" Dec 04 09:42:15 crc kubenswrapper[4707]: E1204 09:42:15.871101 4707 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:15 crc kubenswrapper[4707]: E1204 09:42:15.871694 4707 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:15 crc kubenswrapper[4707]: E1204 09:42:15.871987 4707 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:15 crc kubenswrapper[4707]: E1204 09:42:15.872379 4707 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:15 crc kubenswrapper[4707]: E1204 09:42:15.872794 4707 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:15 crc kubenswrapper[4707]: I1204 09:42:15.872828 4707 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 04 09:42:15 crc kubenswrapper[4707]: E1204 09:42:15.873142 4707 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" interval="200ms" Dec 04 09:42:16 crc kubenswrapper[4707]: E1204 09:42:16.074412 4707 
controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" interval="400ms" Dec 04 09:42:16 crc kubenswrapper[4707]: E1204 09:42:16.475094 4707 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" interval="800ms" Dec 04 09:42:16 crc kubenswrapper[4707]: E1204 09:42:16.759931 4707 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.180:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187df9d718cfa331 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 09:42:11.228648241 +0000 UTC m=+230.664470748,LastTimestamp:2025-12-04 09:42:11.228648241 +0000 UTC m=+230.664470748,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 04 09:42:17 crc kubenswrapper[4707]: E1204 09:42:17.276100 4707 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" interval="1.6s" Dec 04 09:42:18 crc kubenswrapper[4707]: E1204 09:42:18.877383 4707 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: connect: connection refused" interval="3.2s" Dec 04 09:42:20 crc kubenswrapper[4707]: I1204 09:42:20.848839 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:20 crc kubenswrapper[4707]: I1204 09:42:20.849961 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:22 crc kubenswrapper[4707]: E1204 09:42:22.078691 4707 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.180:6443: 
connect: connection refused" interval="6.4s" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.386873 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" podUID="916e5756-f645-44f7-b26d-706a87c57ed8" containerName="oauth-openshift" containerID="cri-o://5249534dae1a114a662aa6a69f7a7a05a231d80d84ce2896c2def18f8847a7d0" gracePeriod=15 Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.571829 4707 generic.go:334] "Generic (PLEG): container finished" podID="916e5756-f645-44f7-b26d-706a87c57ed8" containerID="5249534dae1a114a662aa6a69f7a7a05a231d80d84ce2896c2def18f8847a7d0" exitCode=0 Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.571880 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" event={"ID":"916e5756-f645-44f7-b26d-706a87c57ed8","Type":"ContainerDied","Data":"5249534dae1a114a662aa6a69f7a7a05a231d80d84ce2896c2def18f8847a7d0"} Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.753909 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.754939 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.755288 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.755630 4707 status_manager.go:851] "Failed to get status for pod" podUID="916e5756-f645-44f7-b26d-706a87c57ed8" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-rttms\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.878003 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-error\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.878098 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-audit-policies\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.878123 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-cliconfig\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: 
\"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.878166 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-serving-cert\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.878206 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-ocp-branding-template\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.878248 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p68fv\" (UniqueName: \"kubernetes.io/projected/916e5756-f645-44f7-b26d-706a87c57ed8-kube-api-access-p68fv\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.878284 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-provider-selection\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.878330 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-session\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.878376 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-idp-0-file-data\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.878555 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-router-certs\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.879270 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.879519 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.879561 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-trusted-ca-bundle\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.879629 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-service-ca\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.879731 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/916e5756-f645-44f7-b26d-706a87c57ed8-audit-dir\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.879797 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-login\") pod \"916e5756-f645-44f7-b26d-706a87c57ed8\" (UID: \"916e5756-f645-44f7-b26d-706a87c57ed8\") " Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.880380 4707 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.880407 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.880647 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.880685 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/916e5756-f645-44f7-b26d-706a87c57ed8-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.881565 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.887237 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.887583 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.888146 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/916e5756-f645-44f7-b26d-706a87c57ed8-kube-api-access-p68fv" (OuterVolumeSpecName: "kube-api-access-p68fv") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "kube-api-access-p68fv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.888783 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.889219 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.889484 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.889592 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.889901 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.890131 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "916e5756-f645-44f7-b26d-706a87c57ed8" (UID: "916e5756-f645-44f7-b26d-706a87c57ed8"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981779 4707 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/916e5756-f645-44f7-b26d-706a87c57ed8-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981851 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981864 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981874 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981886 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981897 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p68fv\" (UniqueName: \"kubernetes.io/projected/916e5756-f645-44f7-b26d-706a87c57ed8-kube-api-access-p68fv\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981915 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981932 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981944 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981956 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981969 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:22 crc kubenswrapper[4707]: I1204 09:42:22.981982 4707 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/916e5756-f645-44f7-b26d-706a87c57ed8-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.578301 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" event={"ID":"916e5756-f645-44f7-b26d-706a87c57ed8","Type":"ContainerDied","Data":"b2010ea8593f0fa02b55bceb4c95eb62bb92bfa77c3bde5348406c3879def7f7"} Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.578613 4707 scope.go:117] "RemoveContainer" containerID="5249534dae1a114a662aa6a69f7a7a05a231d80d84ce2896c2def18f8847a7d0" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.578413 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.580691 4707 status_manager.go:851] "Failed to get status for pod" podUID="916e5756-f645-44f7-b26d-706a87c57ed8" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-rttms\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.580993 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.581358 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.591568 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.591797 4707 status_manager.go:851] "Failed to get status for pod" podUID="916e5756-f645-44f7-b26d-706a87c57ed8" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-rttms\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.592005 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.844607 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.845688 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.846424 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.847002 4707 status_manager.go:851] "Failed to get status for pod" podUID="916e5756-f645-44f7-b26d-706a87c57ed8" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-rttms\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.860445 4707 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf78fd6f-682e-41e6-bacd-244ca3f802e8" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.860478 4707 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf78fd6f-682e-41e6-bacd-244ca3f802e8" Dec 04 09:42:23 crc kubenswrapper[4707]: E1204 09:42:23.860917 4707 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:23 crc kubenswrapper[4707]: I1204 09:42:23.861528 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:23 crc kubenswrapper[4707]: W1204 09:42:23.884873 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-e98d7910e38806c2613b4b83ec8f7177f2961c22b671558bdb12d0b52e5476e9 WatchSource:0}: Error finding container e98d7910e38806c2613b4b83ec8f7177f2961c22b671558bdb12d0b52e5476e9: Status 404 returned error can't find the container with id e98d7910e38806c2613b4b83ec8f7177f2961c22b671558bdb12d0b52e5476e9 Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.587141 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.587205 4707 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899" exitCode=1 Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.587276 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899"} Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.587841 4707 scope.go:117] "RemoveContainer" containerID="d82c4ef4b3d7a3db61b7a276ff47cc9bb24e0d7295b96f399865bfc68f9c5899" Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.588441 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.588992 4707 status_manager.go:851] "Failed to get status for pod" podUID="916e5756-f645-44f7-b26d-706a87c57ed8" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-rttms\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.589401 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.589917 4707 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.589974 4707 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="c9e4e4e9097e94a5c6f80c7b194c4cb02bb51e6648140b3388bbe7a3081e34f6" exitCode=0 Dec 04 09:42:24 crc 
kubenswrapper[4707]: I1204 09:42:24.590012 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"c9e4e4e9097e94a5c6f80c7b194c4cb02bb51e6648140b3388bbe7a3081e34f6"} Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.590048 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e98d7910e38806c2613b4b83ec8f7177f2961c22b671558bdb12d0b52e5476e9"} Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.590390 4707 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf78fd6f-682e-41e6-bacd-244ca3f802e8" Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.590415 4707 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf78fd6f-682e-41e6-bacd-244ca3f802e8" Dec 04 09:42:24 crc kubenswrapper[4707]: E1204 09:42:24.590674 4707 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.590682 4707 status_manager.go:851] "Failed to get status for pod" podUID="916e5756-f645-44f7-b26d-706a87c57ed8" pod="openshift-authentication/oauth-openshift-558db77b4-rttms" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-rttms\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.591122 4707 status_manager.go:851] "Failed to get status for pod" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.591448 4707 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:24 crc kubenswrapper[4707]: I1204 09:42:24.591702 4707 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.180:6443: connect: connection refused" Dec 04 09:42:25 crc kubenswrapper[4707]: I1204 09:42:25.604162 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 04 09:42:25 crc kubenswrapper[4707]: I1204 09:42:25.604491 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"03bd24727d9cf7b7471f83d091ea3bcf647b3610ae5453f09bc019ffa0ad8698"} Dec 04 09:42:25 crc kubenswrapper[4707]: I1204 09:42:25.613959 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"11ba624341350aaa04a34afe34de1748108c0cc9099c32e9261390849755200b"} Dec 04 09:42:25 crc kubenswrapper[4707]: I1204 09:42:25.614003 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8bbf22ecaa3485bf6341f22296d8a34319db2c53a4091c05b1df07427c9d9dcf"} Dec 04 09:42:26 crc kubenswrapper[4707]: I1204 09:42:26.622653 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7dd7619cc5f200922ba96fde8b398ce83819a7a033aa7254d0e20ec6fb0bc77e"} Dec 04 09:42:26 crc kubenswrapper[4707]: I1204 09:42:26.622706 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"20b7dcc9af536d51ae21e3b7ba6042a484dd0745027b430491872f056cb8c884"} Dec 04 09:42:26 crc kubenswrapper[4707]: I1204 09:42:26.622721 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"395df72ba0b9d7acc341f883c4052948e384167a55fe14dd45d05d73b34a3432"} Dec 04 09:42:26 crc kubenswrapper[4707]: I1204 09:42:26.622880 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:26 crc kubenswrapper[4707]: I1204 09:42:26.622968 4707 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf78fd6f-682e-41e6-bacd-244ca3f802e8" Dec 04 09:42:26 crc kubenswrapper[4707]: I1204 09:42:26.622996 4707 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf78fd6f-682e-41e6-bacd-244ca3f802e8" Dec 04 09:42:28 crc kubenswrapper[4707]: I1204 09:42:28.861981 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:28 crc kubenswrapper[4707]: I1204 09:42:28.862325 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:28 crc kubenswrapper[4707]: I1204 09:42:28.867444 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:31 crc kubenswrapper[4707]: I1204 09:42:31.062669 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:42:31 crc kubenswrapper[4707]: I1204 09:42:31.634491 4707 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:31 crc kubenswrapper[4707]: I1204 09:42:31.636444 4707 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf78fd6f-682e-41e6-bacd-244ca3f802e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:42:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:42:24Z\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:42:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T09:42:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bbf22ecaa3485bf6341f22296d8a34319db2c53a4091c05b1df07427c9d9dcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:42:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://395df72ba0b9d7acc341f883c4052948e384167a55fe14dd45d05d73b34a3432\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:42:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11ba624341350aaa04a34afe34de1748108c0cc9099c32e9261390849755200b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:42:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",
\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dd7619cc5f200922ba96fde8b398ce83819a7a033aa7254d0e20ec6fb0bc77e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:42:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b7dcc9af536d51ae21e3b7ba6042a484dd0745027b430491872f056cb8c884\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T09:42:26Z\\\"}}}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e4e4e9097e94a5c6f80c7b194c4cb02bb51e6648140b3388bbe7a3081e34f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9e4e4e9097e94a5c6f80c7b194c4cb02bb51e6648140b3388bbe7a3081e34f6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T09:42:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T09:42:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}]}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": pods \"kube-apiserver-crc\" not found" Dec 04 09:42:32 crc kubenswrapper[4707]: I1204 09:42:32.074135 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:42:32 crc kubenswrapper[4707]: I1204 09:42:32.078016 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:42:32 crc kubenswrapper[4707]: I1204 09:42:32.080195 4707 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="4e23471f-b167-4f8f-9811-2918300e9789" Dec 04 09:42:32 crc kubenswrapper[4707]: I1204 09:42:32.652548 4707 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf78fd6f-682e-41e6-bacd-244ca3f802e8" Dec 04 09:42:32 crc kubenswrapper[4707]: I1204 09:42:32.652620 4707 mirror_client.go:130] "Deleting a 
mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf78fd6f-682e-41e6-bacd-244ca3f802e8" Dec 04 09:42:32 crc kubenswrapper[4707]: I1204 09:42:32.656264 4707 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="4e23471f-b167-4f8f-9811-2918300e9789" Dec 04 09:42:32 crc kubenswrapper[4707]: I1204 09:42:32.656859 4707 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://8bbf22ecaa3485bf6341f22296d8a34319db2c53a4091c05b1df07427c9d9dcf" Dec 04 09:42:32 crc kubenswrapper[4707]: I1204 09:42:32.656891 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:33 crc kubenswrapper[4707]: I1204 09:42:33.661355 4707 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf78fd6f-682e-41e6-bacd-244ca3f802e8" Dec 04 09:42:33 crc kubenswrapper[4707]: I1204 09:42:33.661392 4707 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cf78fd6f-682e-41e6-bacd-244ca3f802e8" Dec 04 09:42:33 crc kubenswrapper[4707]: I1204 09:42:33.666457 4707 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="4e23471f-b167-4f8f-9811-2918300e9789" Dec 04 09:42:40 crc kubenswrapper[4707]: I1204 09:42:40.804543 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 04 09:42:41 crc kubenswrapper[4707]: I1204 09:42:41.067011 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 09:42:41 crc kubenswrapper[4707]: I1204 09:42:41.873444 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 04 09:42:42 crc kubenswrapper[4707]: I1204 09:42:42.274921 4707 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 04 09:42:42 crc kubenswrapper[4707]: I1204 09:42:42.848235 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 04 09:42:42 crc kubenswrapper[4707]: I1204 09:42:42.900469 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 04 09:42:43 crc kubenswrapper[4707]: I1204 09:42:43.395697 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 04 09:42:43 crc kubenswrapper[4707]: I1204 09:42:43.568719 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 04 09:42:43 crc kubenswrapper[4707]: I1204 09:42:43.628836 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 04 09:42:43 crc kubenswrapper[4707]: I1204 09:42:43.690874 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 
09:42:44.062972 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 09:42:44.104128 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 09:42:44.162745 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 09:42:44.176410 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 09:42:44.258763 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 09:42:44.453469 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 09:42:44.465278 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 09:42:44.479170 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 09:42:44.504453 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 09:42:44.580773 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 09:42:44.703990 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 04 09:42:44 crc kubenswrapper[4707]: I1204 09:42:44.853575 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.091759 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.125691 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.160259 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.206401 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.367523 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.386570 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.395423 4707 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"trusted-ca-bundle" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.506976 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.593061 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.664017 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.814139 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.824088 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.864468 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.907216 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 04 09:42:45 crc kubenswrapper[4707]: I1204 09:42:45.958084 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.007892 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.071506 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.108532 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.204554 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.268147 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.396015 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.406767 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.557239 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.558493 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.693214 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.726093 4707 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.747669 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.764071 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.802948 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.929021 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.941867 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.953829 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 04 09:42:46 crc kubenswrapper[4707]: I1204 09:42:46.989526 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.044741 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.152853 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.154399 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.172035 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.264094 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.278732 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.343699 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.350229 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.567806 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.615036 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.621019 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.633969 4707 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.730248 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.747647 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.804296 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 04 09:42:47 crc kubenswrapper[4707]: I1204 09:42:47.916624 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.132624 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.181148 4707 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.227477 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.238845 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.258228 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.388657 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.409878 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.410247 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.466778 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.542911 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.612228 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.627400 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.637394 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.661482 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.720153 4707 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.768550 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.769783 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.772021 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.845762 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.930669 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.973081 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 04 09:42:48 crc kubenswrapper[4707]: I1204 09:42:48.981515 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.141688 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.152687 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.260940 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.337025 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.529634 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.575939 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.638603 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.662278 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.679127 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.695434 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.753603 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.788967 4707 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.792907 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.802187 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.806382 4707 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.808251 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=39.8082378 podStartE2EDuration="39.8082378s" podCreationTimestamp="2025-12-04 09:42:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:42:31.517180224 +0000 UTC m=+250.953002731" watchObservedRunningTime="2025-12-04 09:42:49.8082378 +0000 UTC m=+269.244060307" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.810087 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-rttms"] Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.810128 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.812485 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.818807 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.823781 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=18.823761171 podStartE2EDuration="18.823761171s" podCreationTimestamp="2025-12-04 09:42:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:42:49.82370407 +0000 UTC m=+269.259526587" watchObservedRunningTime="2025-12-04 09:42:49.823761171 +0000 UTC m=+269.259583678" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.838866 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.861393 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.862016 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.868834 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.890710 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.894800 4707 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.945792 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.978292 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 04 09:42:49 crc kubenswrapper[4707]: I1204 09:42:49.993471 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.106854 4707 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.110824 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6676954577-7m699"] Dec 04 09:42:50 crc kubenswrapper[4707]: E1204 09:42:50.111060 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="916e5756-f645-44f7-b26d-706a87c57ed8" containerName="oauth-openshift" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.111082 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="916e5756-f645-44f7-b26d-706a87c57ed8" containerName="oauth-openshift" Dec 04 09:42:50 crc kubenswrapper[4707]: E1204 09:42:50.111101 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" containerName="installer" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.111107 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" containerName="installer" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.111187 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="c38ed60f-f162-4163-8502-17b0d8369f08" containerName="installer" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.111201 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="916e5756-f645-44f7-b26d-706a87c57ed8" containerName="oauth-openshift" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.111287 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.111578 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.116038 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.116399 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.117036 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.118240 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.118381 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.118589 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.118643 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.118650 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.118697 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.118612 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.118850 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.119237 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.128169 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.128295 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.130482 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.131294 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.134523 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217220 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-template-login\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217286 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217323 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-router-certs\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217369 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh4hz\" (UniqueName: \"kubernetes.io/projected/823a3fed-937a-4831-9adf-f4f907158ebb-kube-api-access-gh4hz\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217403 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/823a3fed-937a-4831-9adf-f4f907158ebb-audit-dir\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217425 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217453 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217487 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-session\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217521 4707 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-service-ca\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217544 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-audit-policies\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217566 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217597 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217620 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.217646 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-template-error\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.269452 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.276844 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318545 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-service-ca\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 
09:42:50.318606 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-audit-policies\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318635 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318669 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318691 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318715 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-template-error\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318764 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318784 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-template-login\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318817 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-router-certs\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 
09:42:50.318838 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh4hz\" (UniqueName: \"kubernetes.io/projected/823a3fed-937a-4831-9adf-f4f907158ebb-kube-api-access-gh4hz\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318867 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/823a3fed-937a-4831-9adf-f4f907158ebb-audit-dir\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318887 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318906 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.318929 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-session\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.319584 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/823a3fed-937a-4831-9adf-f4f907158ebb-audit-dir\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.320654 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.320650 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-service-ca\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.320780 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" 
(UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-audit-policies\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.325265 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-router-certs\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.325253 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.325833 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-session\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.326742 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-template-error\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.327569 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.327645 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-template-login\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.327749 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.328717 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.335883 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/823a3fed-937a-4831-9adf-f4f907158ebb-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.338094 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh4hz\" (UniqueName: \"kubernetes.io/projected/823a3fed-937a-4831-9adf-f4f907158ebb-kube-api-access-gh4hz\") pod \"oauth-openshift-6676954577-7m699\" (UID: \"823a3fed-937a-4831-9adf-f4f907158ebb\") " pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.387165 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.420770 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.432206 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.660417 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.669943 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.723132 4707 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.723211 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.838776 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.851715 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="916e5756-f645-44f7-b26d-706a87c57ed8" path="/var/lib/kubelet/pods/916e5756-f645-44f7-b26d-706a87c57ed8/volumes" Dec 04 09:42:50 crc kubenswrapper[4707]: I1204 09:42:50.981806 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.062737 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.103589 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.136984 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 04 09:42:51 crc 
kubenswrapper[4707]: I1204 09:42:51.180025 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.200900 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.293822 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.317273 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.455543 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.495712 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.581750 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.598706 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.729455 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.824351 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.858492 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.980219 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 04 09:42:51 crc kubenswrapper[4707]: I1204 09:42:51.985216 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.006940 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.007955 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.027026 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.074545 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.120031 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.134147 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 04 09:42:52 crc 
kubenswrapper[4707]: I1204 09:42:52.141555 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.245758 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.249184 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.268784 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.287442 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.300362 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.328401 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.434756 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.507858 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.574933 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.630037 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.678858 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.679588 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.682068 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.787406 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 04 09:42:52 crc kubenswrapper[4707]: I1204 09:42:52.824679 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.048424 4707 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.097838 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.242383 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 04 09:42:53 crc kubenswrapper[4707]: 
I1204 09:42:53.252544 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.427235 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.439972 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.458790 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6676954577-7m699"] Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.544081 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.620755 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.626705 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.626933 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.944366 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.997432 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 04 09:42:53 crc kubenswrapper[4707]: I1204 09:42:53.999126 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.009737 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.012404 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.040247 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.063769 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.085478 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 04 09:42:54 crc kubenswrapper[4707]: E1204 09:42:54.135262 4707 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 04 09:42:54 crc kubenswrapper[4707]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-6676954577-7m699_openshift-authentication_823a3fed-937a-4831-9adf-f4f907158ebb_0(7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb): error adding pod openshift-authentication_oauth-openshift-6676954577-7m699 to CNI network 
"multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb" Netns:"/var/run/netns/24822744-88ff-4252-b612-2b900be5bbd7" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-6676954577-7m699;K8S_POD_INFRA_CONTAINER_ID=7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb;K8S_POD_UID=823a3fed-937a-4831-9adf-f4f907158ebb" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-6676954577-7m699] networking: Multus: [openshift-authentication/oauth-openshift-6676954577-7m699/823a3fed-937a-4831-9adf-f4f907158ebb]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-6676954577-7m699 in out of cluster comm: pod "oauth-openshift-6676954577-7m699" not found Dec 04 09:42:54 crc kubenswrapper[4707]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 04 09:42:54 crc kubenswrapper[4707]: > Dec 04 09:42:54 crc kubenswrapper[4707]: E1204 09:42:54.135383 4707 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 04 09:42:54 crc kubenswrapper[4707]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-6676954577-7m699_openshift-authentication_823a3fed-937a-4831-9adf-f4f907158ebb_0(7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb): error adding pod openshift-authentication_oauth-openshift-6676954577-7m699 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb" Netns:"/var/run/netns/24822744-88ff-4252-b612-2b900be5bbd7" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-6676954577-7m699;K8S_POD_INFRA_CONTAINER_ID=7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb;K8S_POD_UID=823a3fed-937a-4831-9adf-f4f907158ebb" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-6676954577-7m699] networking: Multus: [openshift-authentication/oauth-openshift-6676954577-7m699/823a3fed-937a-4831-9adf-f4f907158ebb]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-6676954577-7m699 in out of cluster comm: pod "oauth-openshift-6676954577-7m699" not found Dec 04 09:42:54 crc kubenswrapper[4707]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 04 09:42:54 crc kubenswrapper[4707]: > pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:54 crc kubenswrapper[4707]: E1204 09:42:54.135411 4707 
kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 04 09:42:54 crc kubenswrapper[4707]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-6676954577-7m699_openshift-authentication_823a3fed-937a-4831-9adf-f4f907158ebb_0(7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb): error adding pod openshift-authentication_oauth-openshift-6676954577-7m699 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb" Netns:"/var/run/netns/24822744-88ff-4252-b612-2b900be5bbd7" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-6676954577-7m699;K8S_POD_INFRA_CONTAINER_ID=7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb;K8S_POD_UID=823a3fed-937a-4831-9adf-f4f907158ebb" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-6676954577-7m699] networking: Multus: [openshift-authentication/oauth-openshift-6676954577-7m699/823a3fed-937a-4831-9adf-f4f907158ebb]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-6676954577-7m699 in out of cluster comm: pod "oauth-openshift-6676954577-7m699" not found Dec 04 09:42:54 crc kubenswrapper[4707]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 04 09:42:54 crc kubenswrapper[4707]: > pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:54 crc kubenswrapper[4707]: E1204 09:42:54.135472 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"oauth-openshift-6676954577-7m699_openshift-authentication(823a3fed-937a-4831-9adf-f4f907158ebb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"oauth-openshift-6676954577-7m699_openshift-authentication(823a3fed-937a-4831-9adf-f4f907158ebb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-6676954577-7m699_openshift-authentication_823a3fed-937a-4831-9adf-f4f907158ebb_0(7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb): error adding pod openshift-authentication_oauth-openshift-6676954577-7m699 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb\\\" Netns:\\\"/var/run/netns/24822744-88ff-4252-b612-2b900be5bbd7\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-6676954577-7m699;K8S_POD_INFRA_CONTAINER_ID=7795706777e3294f4a67bbf8bcebd38710fecd86908ee5e4359a115fb75273fb;K8S_POD_UID=823a3fed-937a-4831-9adf-f4f907158ebb\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-6676954577-7m699] networking: Multus: [openshift-authentication/oauth-openshift-6676954577-7m699/823a3fed-937a-4831-9adf-f4f907158ebb]: error setting the networks status, 
pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-6676954577-7m699 in out of cluster comm: pod \\\"oauth-openshift-6676954577-7m699\\\" not found\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-authentication/oauth-openshift-6676954577-7m699" podUID="823a3fed-937a-4831-9adf-f4f907158ebb" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.181351 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.198050 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.204045 4707 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.204326 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a" gracePeriod=5 Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.209356 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.248279 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.254162 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.295497 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.298677 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.304657 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.520313 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.744484 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.777434 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.777838 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.832366 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.923462 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 04 09:42:54 crc kubenswrapper[4707]: I1204 09:42:54.928546 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 04 09:42:55 crc kubenswrapper[4707]: I1204 09:42:55.320450 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 04 09:42:55 crc kubenswrapper[4707]: I1204 09:42:55.412059 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 04 09:42:55 crc kubenswrapper[4707]: I1204 09:42:55.424077 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 04 09:42:55 crc kubenswrapper[4707]: I1204 09:42:55.856715 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 04 09:42:55 crc kubenswrapper[4707]: I1204 09:42:55.983297 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 04 09:42:56 crc kubenswrapper[4707]: I1204 09:42:56.038922 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 04 09:42:56 crc kubenswrapper[4707]: I1204 09:42:56.100636 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 04 09:42:56 crc kubenswrapper[4707]: I1204 09:42:56.111542 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 04 09:42:56 crc kubenswrapper[4707]: I1204 09:42:56.173020 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 04 09:42:56 crc kubenswrapper[4707]: I1204 09:42:56.174634 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 04 09:42:56 crc kubenswrapper[4707]: I1204 09:42:56.184291 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 04 09:42:56 crc kubenswrapper[4707]: I1204 09:42:56.204305 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 04 09:42:56 crc kubenswrapper[4707]: I1204 09:42:56.279944 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 04 09:42:56 crc kubenswrapper[4707]: I1204 09:42:56.380688 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 04 09:42:56 crc kubenswrapper[4707]: I1204 09:42:56.456799 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 04 09:42:56 crc kubenswrapper[4707]: I1204 09:42:56.725498 4707 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 09:42:57.021007 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 09:42:57.022756 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 09:42:57.049360 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 09:42:57.110566 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 09:42:57.154090 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 09:42:57.228345 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 09:42:57.258973 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 09:42:57.352725 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 09:42:57.605328 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 09:42:57.758944 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 04 09:42:57 crc kubenswrapper[4707]: E1204 09:42:57.815475 4707 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 04 09:42:57 crc kubenswrapper[4707]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-6676954577-7m699_openshift-authentication_823a3fed-937a-4831-9adf-f4f907158ebb_0(8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587): error adding pod openshift-authentication_oauth-openshift-6676954577-7m699 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587" Netns:"/var/run/netns/27d5b603-2a22-41fb-8195-724e100ce3e4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-6676954577-7m699;K8S_POD_INFRA_CONTAINER_ID=8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587;K8S_POD_UID=823a3fed-937a-4831-9adf-f4f907158ebb" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-6676954577-7m699] networking: Multus: [openshift-authentication/oauth-openshift-6676954577-7m699/823a3fed-937a-4831-9adf-f4f907158ebb]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-6676954577-7m699 in out of cluster comm: pod "oauth-openshift-6676954577-7m699" not found Dec 04 09:42:57 crc kubenswrapper[4707]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 04 09:42:57 crc kubenswrapper[4707]: > Dec 04 09:42:57 crc kubenswrapper[4707]: E1204 09:42:57.815544 4707 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 04 09:42:57 crc kubenswrapper[4707]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-6676954577-7m699_openshift-authentication_823a3fed-937a-4831-9adf-f4f907158ebb_0(8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587): error adding pod openshift-authentication_oauth-openshift-6676954577-7m699 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587" Netns:"/var/run/netns/27d5b603-2a22-41fb-8195-724e100ce3e4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-6676954577-7m699;K8S_POD_INFRA_CONTAINER_ID=8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587;K8S_POD_UID=823a3fed-937a-4831-9adf-f4f907158ebb" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-6676954577-7m699] networking: Multus: [openshift-authentication/oauth-openshift-6676954577-7m699/823a3fed-937a-4831-9adf-f4f907158ebb]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-6676954577-7m699 in out of cluster comm: pod "oauth-openshift-6676954577-7m699" not found Dec 04 09:42:57 crc kubenswrapper[4707]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 04 09:42:57 crc kubenswrapper[4707]: > pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:57 crc kubenswrapper[4707]: E1204 09:42:57.815572 4707 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 04 09:42:57 crc kubenswrapper[4707]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-6676954577-7m699_openshift-authentication_823a3fed-937a-4831-9adf-f4f907158ebb_0(8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587): error adding pod openshift-authentication_oauth-openshift-6676954577-7m699 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587" Netns:"/var/run/netns/27d5b603-2a22-41fb-8195-724e100ce3e4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-6676954577-7m699;K8S_POD_INFRA_CONTAINER_ID=8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587;K8S_POD_UID=823a3fed-937a-4831-9adf-f4f907158ebb" Path:"" ERRORED: error configuring pod 
[openshift-authentication/oauth-openshift-6676954577-7m699] networking: Multus: [openshift-authentication/oauth-openshift-6676954577-7m699/823a3fed-937a-4831-9adf-f4f907158ebb]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-6676954577-7m699 in out of cluster comm: pod "oauth-openshift-6676954577-7m699" not found Dec 04 09:42:57 crc kubenswrapper[4707]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 04 09:42:57 crc kubenswrapper[4707]: > pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:42:57 crc kubenswrapper[4707]: E1204 09:42:57.815626 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"oauth-openshift-6676954577-7m699_openshift-authentication(823a3fed-937a-4831-9adf-f4f907158ebb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"oauth-openshift-6676954577-7m699_openshift-authentication(823a3fed-937a-4831-9adf-f4f907158ebb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-6676954577-7m699_openshift-authentication_823a3fed-937a-4831-9adf-f4f907158ebb_0(8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587): error adding pod openshift-authentication_oauth-openshift-6676954577-7m699 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587\\\" Netns:\\\"/var/run/netns/27d5b603-2a22-41fb-8195-724e100ce3e4\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-6676954577-7m699;K8S_POD_INFRA_CONTAINER_ID=8c208673c51387a88432e6d34fff492d7ccb6600dbf6a397671792ce59f03587;K8S_POD_UID=823a3fed-937a-4831-9adf-f4f907158ebb\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-6676954577-7m699] networking: Multus: [openshift-authentication/oauth-openshift-6676954577-7m699/823a3fed-937a-4831-9adf-f4f907158ebb]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-6676954577-7m699 in out of cluster comm: pod \\\"oauth-openshift-6676954577-7m699\\\" not found\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-authentication/oauth-openshift-6676954577-7m699" podUID="823a3fed-937a-4831-9adf-f4f907158ebb" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 09:42:57.860962 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 04 09:42:57 crc kubenswrapper[4707]: I1204 
09:42:57.984569 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 04 09:42:58 crc kubenswrapper[4707]: I1204 09:42:58.321132 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 04 09:42:58 crc kubenswrapper[4707]: I1204 09:42:58.505197 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 04 09:42:58 crc kubenswrapper[4707]: I1204 09:42:58.761651 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 04 09:42:58 crc kubenswrapper[4707]: I1204 09:42:58.828751 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.023995 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.180689 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.237057 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.672836 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.769990 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.770088 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.801502 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.801569 4707 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a" exitCode=137 Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.801630 4707 scope.go:117] "RemoveContainer" containerID="64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.801682 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.815025 4707 scope.go:117] "RemoveContainer" containerID="64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a" Dec 04 09:42:59 crc kubenswrapper[4707]: E1204 09:42:59.815537 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a\": container with ID starting with 64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a not found: ID does not exist" containerID="64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.815587 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a"} err="failed to get container status \"64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a\": rpc error: code = NotFound desc = could not find container \"64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a\": container with ID starting with 64b04e66f13d4993447b1f5e043a6e7943450c072b7739d0842ff1b47abca82a not found: ID does not exist" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.951682 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.951782 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.951821 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.951844 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.951886 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.951897 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.951924 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.951952 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.951990 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.952312 4707 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.952353 4707 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.952367 4707 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.952379 4707 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 04 09:42:59 crc kubenswrapper[4707]: I1204 09:42:59.959306 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:43:00 crc kubenswrapper[4707]: I1204 09:43:00.053847 4707 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:00 crc kubenswrapper[4707]: I1204 09:43:00.212677 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 04 09:43:00 crc kubenswrapper[4707]: I1204 09:43:00.851301 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 04 09:43:00 crc kubenswrapper[4707]: I1204 09:43:00.851841 4707 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 04 09:43:00 crc kubenswrapper[4707]: I1204 09:43:00.861725 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 09:43:00 crc kubenswrapper[4707]: I1204 09:43:00.861764 4707 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="daf6370c-f2e4-408e-a234-b9282121b0e2" Dec 04 09:43:00 crc kubenswrapper[4707]: I1204 09:43:00.866531 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 09:43:00 crc kubenswrapper[4707]: I1204 09:43:00.866564 4707 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="daf6370c-f2e4-408e-a234-b9282121b0e2" Dec 04 09:43:12 crc kubenswrapper[4707]: I1204 09:43:12.844091 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:43:12 crc kubenswrapper[4707]: I1204 09:43:12.845148 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:43:13 crc kubenswrapper[4707]: I1204 09:43:13.053953 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6676954577-7m699"] Dec 04 09:43:13 crc kubenswrapper[4707]: I1204 09:43:13.888192 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6676954577-7m699" event={"ID":"823a3fed-937a-4831-9adf-f4f907158ebb","Type":"ContainerStarted","Data":"c057e27d4a9eb7136596539e1c198dc44908178022674180dca554a46c86b2d0"} Dec 04 09:43:13 crc kubenswrapper[4707]: I1204 09:43:13.888560 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:43:13 crc kubenswrapper[4707]: I1204 09:43:13.888572 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6676954577-7m699" event={"ID":"823a3fed-937a-4831-9adf-f4f907158ebb","Type":"ContainerStarted","Data":"7f99de1daf23246ef7618309a47c617d61c693b2da2d1315e93e49f9df50de3a"} Dec 04 09:43:13 crc kubenswrapper[4707]: I1204 09:43:13.893484 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6676954577-7m699" Dec 04 09:43:13 crc kubenswrapper[4707]: I1204 09:43:13.908287 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6676954577-7m699" podStartSLOduration=76.908273105 podStartE2EDuration="1m16.908273105s" podCreationTimestamp="2025-12-04 09:41:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:43:13.906111917 +0000 UTC m=+293.341934434" watchObservedRunningTime="2025-12-04 09:43:13.908273105 +0000 UTC m=+293.344095612" Dec 04 09:43:17 crc kubenswrapper[4707]: I1204 09:43:17.908626 4707 generic.go:334] "Generic (PLEG): container finished" podID="f0411515-ac32-4ad1-a956-ce737c8d0d75" containerID="5ea293cf21194305a675510f4b4c7e7f0097cc2919231a8170612e9c0145805f" exitCode=0 Dec 04 09:43:17 crc kubenswrapper[4707]: I1204 09:43:17.909082 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" event={"ID":"f0411515-ac32-4ad1-a956-ce737c8d0d75","Type":"ContainerDied","Data":"5ea293cf21194305a675510f4b4c7e7f0097cc2919231a8170612e9c0145805f"} Dec 04 09:43:17 crc kubenswrapper[4707]: I1204 09:43:17.911185 4707 scope.go:117] "RemoveContainer" containerID="5ea293cf21194305a675510f4b4c7e7f0097cc2919231a8170612e9c0145805f" Dec 04 09:43:18 crc kubenswrapper[4707]: I1204 09:43:18.917397 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" event={"ID":"f0411515-ac32-4ad1-a956-ce737c8d0d75","Type":"ContainerStarted","Data":"c980d1254f0d8fb4a796a610dcece477af02acf05b1cbbfc8515567d3a288818"} Dec 04 09:43:18 crc kubenswrapper[4707]: I1204 09:43:18.918562 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:43:18 crc kubenswrapper[4707]: I1204 09:43:18.922062 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.128210 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-controller-manager/controller-manager-879f6c89f-v52ch"] Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.128770 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" podUID="99bdb744-6ab3-42ac-9729-137102bdfe72" containerName="controller-manager" containerID="cri-o://ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39" gracePeriod=30 Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.294315 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c"] Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.294563 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" podUID="0ebc6de2-21f5-41ae-800d-8ceb365c7b88" containerName="route-controller-manager" containerID="cri-o://c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4" gracePeriod=30 Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.539281 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.634903 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.663690 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-client-ca\") pod \"99bdb744-6ab3-42ac-9729-137102bdfe72\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.664941 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99bdb744-6ab3-42ac-9729-137102bdfe72-serving-cert\") pod \"99bdb744-6ab3-42ac-9729-137102bdfe72\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.665819 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-config\") pod \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.666661 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgl8h\" (UniqueName: \"kubernetes.io/projected/99bdb744-6ab3-42ac-9729-137102bdfe72-kube-api-access-pgl8h\") pod \"99bdb744-6ab3-42ac-9729-137102bdfe72\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.667033 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-client-ca\") pod \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.667146 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdspz\" (UniqueName: \"kubernetes.io/projected/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-kube-api-access-vdspz\") pod 
\"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.667236 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-config\") pod \"99bdb744-6ab3-42ac-9729-137102bdfe72\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.664866 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-client-ca" (OuterVolumeSpecName: "client-ca") pod "99bdb744-6ab3-42ac-9729-137102bdfe72" (UID: "99bdb744-6ab3-42ac-9729-137102bdfe72"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.666600 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-config" (OuterVolumeSpecName: "config") pod "0ebc6de2-21f5-41ae-800d-8ceb365c7b88" (UID: "0ebc6de2-21f5-41ae-800d-8ceb365c7b88"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.667562 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-client-ca" (OuterVolumeSpecName: "client-ca") pod "0ebc6de2-21f5-41ae-800d-8ceb365c7b88" (UID: "0ebc6de2-21f5-41ae-800d-8ceb365c7b88"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.667833 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-serving-cert\") pod \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\" (UID: \"0ebc6de2-21f5-41ae-800d-8ceb365c7b88\") " Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.667934 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-proxy-ca-bundles\") pod \"99bdb744-6ab3-42ac-9729-137102bdfe72\" (UID: \"99bdb744-6ab3-42ac-9729-137102bdfe72\") " Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.668337 4707 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.668468 4707 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.668563 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.669068 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "99bdb744-6ab3-42ac-9729-137102bdfe72" (UID: "99bdb744-6ab3-42ac-9729-137102bdfe72"). 
InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.670170 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-config" (OuterVolumeSpecName: "config") pod "99bdb744-6ab3-42ac-9729-137102bdfe72" (UID: "99bdb744-6ab3-42ac-9729-137102bdfe72"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.670873 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99bdb744-6ab3-42ac-9729-137102bdfe72-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "99bdb744-6ab3-42ac-9729-137102bdfe72" (UID: "99bdb744-6ab3-42ac-9729-137102bdfe72"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.671281 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99bdb744-6ab3-42ac-9729-137102bdfe72-kube-api-access-pgl8h" (OuterVolumeSpecName: "kube-api-access-pgl8h") pod "99bdb744-6ab3-42ac-9729-137102bdfe72" (UID: "99bdb744-6ab3-42ac-9729-137102bdfe72"). InnerVolumeSpecName "kube-api-access-pgl8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.672852 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-kube-api-access-vdspz" (OuterVolumeSpecName: "kube-api-access-vdspz") pod "0ebc6de2-21f5-41ae-800d-8ceb365c7b88" (UID: "0ebc6de2-21f5-41ae-800d-8ceb365c7b88"). InnerVolumeSpecName "kube-api-access-vdspz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.673207 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0ebc6de2-21f5-41ae-800d-8ceb365c7b88" (UID: "0ebc6de2-21f5-41ae-800d-8ceb365c7b88"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.770126 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99bdb744-6ab3-42ac-9729-137102bdfe72-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.770160 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgl8h\" (UniqueName: \"kubernetes.io/projected/99bdb744-6ab3-42ac-9729-137102bdfe72-kube-api-access-pgl8h\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.770173 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdspz\" (UniqueName: \"kubernetes.io/projected/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-kube-api-access-vdspz\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.770191 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.770201 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ebc6de2-21f5-41ae-800d-8ceb365c7b88-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.770212 4707 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99bdb744-6ab3-42ac-9729-137102bdfe72-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.949905 4707 generic.go:334] "Generic (PLEG): container finished" podID="0ebc6de2-21f5-41ae-800d-8ceb365c7b88" containerID="c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4" exitCode=0 Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.949968 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" event={"ID":"0ebc6de2-21f5-41ae-800d-8ceb365c7b88","Type":"ContainerDied","Data":"c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4"} Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.949999 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" event={"ID":"0ebc6de2-21f5-41ae-800d-8ceb365c7b88","Type":"ContainerDied","Data":"13478c26febb87d98a7d0031dc5d330ead6e59b047bc470b18fd072595d26f00"} Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.950018 4707 scope.go:117] "RemoveContainer" containerID="c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.950117 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.954486 4707 generic.go:334] "Generic (PLEG): container finished" podID="99bdb744-6ab3-42ac-9729-137102bdfe72" containerID="ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39" exitCode=0 Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.954535 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" event={"ID":"99bdb744-6ab3-42ac-9729-137102bdfe72","Type":"ContainerDied","Data":"ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39"} Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.954566 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" event={"ID":"99bdb744-6ab3-42ac-9729-137102bdfe72","Type":"ContainerDied","Data":"3826634ad4b08364807f9655d0343114323fa447437875623a2cc2fb85fcec14"} Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.954621 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v52ch" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.973016 4707 scope.go:117] "RemoveContainer" containerID="c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4" Dec 04 09:43:23 crc kubenswrapper[4707]: E1204 09:43:23.973539 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4\": container with ID starting with c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4 not found: ID does not exist" containerID="c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.973601 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4"} err="failed to get container status \"c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4\": rpc error: code = NotFound desc = could not find container \"c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4\": container with ID starting with c681183f8bd473ee1c01b9c3abf106affa5cb2fcd0dd7bac778e27e630b9abb4 not found: ID does not exist" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.973629 4707 scope.go:117] "RemoveContainer" containerID="ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.988948 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c"] Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.992716 4707 scope.go:117] "RemoveContainer" containerID="ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39" Dec 04 09:43:23 crc kubenswrapper[4707]: E1204 09:43:23.993381 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39\": container with ID starting with ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39 not found: ID does not exist" containerID="ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39" Dec 04 09:43:23 crc 
kubenswrapper[4707]: I1204 09:43:23.993498 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39"} err="failed to get container status \"ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39\": rpc error: code = NotFound desc = could not find container \"ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39\": container with ID starting with ea150bf48d5ae4a5275c465b6536c79e798e4a16f077c4be4e18b345f7734e39 not found: ID does not exist" Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.995517 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6fb9c"] Dec 04 09:43:23 crc kubenswrapper[4707]: I1204 09:43:23.999772 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v52ch"] Dec 04 09:43:24 crc kubenswrapper[4707]: I1204 09:43:24.003959 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v52ch"] Dec 04 09:43:24 crc kubenswrapper[4707]: I1204 09:43:24.850489 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ebc6de2-21f5-41ae-800d-8ceb365c7b88" path="/var/lib/kubelet/pods/0ebc6de2-21f5-41ae-800d-8ceb365c7b88/volumes" Dec 04 09:43:24 crc kubenswrapper[4707]: I1204 09:43:24.851297 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99bdb744-6ab3-42ac-9729-137102bdfe72" path="/var/lib/kubelet/pods/99bdb744-6ab3-42ac-9729-137102bdfe72/volumes" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.137212 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2"] Dec 04 09:43:25 crc kubenswrapper[4707]: E1204 09:43:25.137511 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ebc6de2-21f5-41ae-800d-8ceb365c7b88" containerName="route-controller-manager" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.137545 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ebc6de2-21f5-41ae-800d-8ceb365c7b88" containerName="route-controller-manager" Dec 04 09:43:25 crc kubenswrapper[4707]: E1204 09:43:25.137562 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99bdb744-6ab3-42ac-9729-137102bdfe72" containerName="controller-manager" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.137568 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="99bdb744-6ab3-42ac-9729-137102bdfe72" containerName="controller-manager" Dec 04 09:43:25 crc kubenswrapper[4707]: E1204 09:43:25.137587 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.137594 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.137692 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ebc6de2-21f5-41ae-800d-8ceb365c7b88" containerName="route-controller-manager" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.137702 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.137711 4707 
memory_manager.go:354] "RemoveStaleState removing state" podUID="99bdb744-6ab3-42ac-9729-137102bdfe72" containerName="controller-manager" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.138103 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.140079 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.140478 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.140647 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.141491 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.141536 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.141804 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.146082 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5947857f89-g755t"] Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.147653 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.149916 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.150336 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.150587 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.150765 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.150920 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.153601 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2"] Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.154443 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.158634 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.160095 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5947857f89-g755t"] Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.189574 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-proxy-ca-bundles\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.189652 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9ffq\" (UniqueName: \"kubernetes.io/projected/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-kube-api-access-h9ffq\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.189715 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-config\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.189745 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-serving-cert\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " 
pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.189776 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-config\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.189806 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/996acfd6-def0-4d6a-9d54-439c67902b92-serving-cert\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.189833 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-client-ca\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.189880 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp27n\" (UniqueName: \"kubernetes.io/projected/996acfd6-def0-4d6a-9d54-439c67902b92-kube-api-access-sp27n\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.189909 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-client-ca\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.290702 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-proxy-ca-bundles\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.290936 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9ffq\" (UniqueName: \"kubernetes.io/projected/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-kube-api-access-h9ffq\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.291053 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-config\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " 
pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.291130 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-serving-cert\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.291262 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-config\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.291335 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/996acfd6-def0-4d6a-9d54-439c67902b92-serving-cert\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.291421 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-client-ca\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.291622 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp27n\" (UniqueName: \"kubernetes.io/projected/996acfd6-def0-4d6a-9d54-439c67902b92-kube-api-access-sp27n\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.291724 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-client-ca\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.292472 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-proxy-ca-bundles\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.292480 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-client-ca\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.292630 4707 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-config\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.293289 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-client-ca\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.295309 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-config\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.300368 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/996acfd6-def0-4d6a-9d54-439c67902b92-serving-cert\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.300846 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-serving-cert\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.310411 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9ffq\" (UniqueName: \"kubernetes.io/projected/20ade1ff-b3cd-46ae-9ad5-a298e5397ccf-kube-api-access-h9ffq\") pod \"route-controller-manager-7bb885d9c5-frlv2\" (UID: \"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf\") " pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.312119 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp27n\" (UniqueName: \"kubernetes.io/projected/996acfd6-def0-4d6a-9d54-439c67902b92-kube-api-access-sp27n\") pod \"controller-manager-5947857f89-g755t\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.462154 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.474844 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.657891 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2"] Dec 04 09:43:25 crc kubenswrapper[4707]: W1204 09:43:25.666994 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20ade1ff_b3cd_46ae_9ad5_a298e5397ccf.slice/crio-ad0ca09b6c098970d46c75d678c7184f67d03ddc09e717927062a669d4205d99 WatchSource:0}: Error finding container ad0ca09b6c098970d46c75d678c7184f67d03ddc09e717927062a669d4205d99: Status 404 returned error can't find the container with id ad0ca09b6c098970d46c75d678c7184f67d03ddc09e717927062a669d4205d99 Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.700204 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5947857f89-g755t"] Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.968392 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" event={"ID":"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf","Type":"ContainerStarted","Data":"15923a98ba2bf627684e46e31f5f5480d083e6d371fd5268f76378aa1b62cb35"} Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.968474 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" event={"ID":"20ade1ff-b3cd-46ae-9ad5-a298e5397ccf","Type":"ContainerStarted","Data":"ad0ca09b6c098970d46c75d678c7184f67d03ddc09e717927062a669d4205d99"} Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.968694 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.969756 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" event={"ID":"996acfd6-def0-4d6a-9d54-439c67902b92","Type":"ContainerStarted","Data":"dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636"} Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.969779 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" event={"ID":"996acfd6-def0-4d6a-9d54-439c67902b92","Type":"ContainerStarted","Data":"6792fac8da3668a32bc3cfc2ab024d9d9094cca987d24e653857adaa6219f396"} Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.970155 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.975227 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:25 crc kubenswrapper[4707]: I1204 09:43:25.996035 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" podStartSLOduration=2.996012296 podStartE2EDuration="2.996012296s" podCreationTimestamp="2025-12-04 09:43:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:43:25.993130796 +0000 UTC 
m=+305.428953323" watchObservedRunningTime="2025-12-04 09:43:25.996012296 +0000 UTC m=+305.431834803" Dec 04 09:43:26 crc kubenswrapper[4707]: I1204 09:43:26.021475 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" podStartSLOduration=3.021455263 podStartE2EDuration="3.021455263s" podCreationTimestamp="2025-12-04 09:43:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:43:26.019145092 +0000 UTC m=+305.454967609" watchObservedRunningTime="2025-12-04 09:43:26.021455263 +0000 UTC m=+305.457277760" Dec 04 09:43:26 crc kubenswrapper[4707]: I1204 09:43:26.280434 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7bb885d9c5-frlv2" Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.133242 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5947857f89-g755t"] Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.134369 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" podUID="996acfd6-def0-4d6a-9d54-439c67902b92" containerName="controller-manager" containerID="cri-o://dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636" gracePeriod=30 Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.644523 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.820230 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-client-ca\") pod \"996acfd6-def0-4d6a-9d54-439c67902b92\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.820275 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/996acfd6-def0-4d6a-9d54-439c67902b92-serving-cert\") pod \"996acfd6-def0-4d6a-9d54-439c67902b92\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.820309 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-config\") pod \"996acfd6-def0-4d6a-9d54-439c67902b92\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.820406 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-proxy-ca-bundles\") pod \"996acfd6-def0-4d6a-9d54-439c67902b92\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.820482 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sp27n\" (UniqueName: \"kubernetes.io/projected/996acfd6-def0-4d6a-9d54-439c67902b92-kube-api-access-sp27n\") pod \"996acfd6-def0-4d6a-9d54-439c67902b92\" (UID: \"996acfd6-def0-4d6a-9d54-439c67902b92\") " Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.821179 4707 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "996acfd6-def0-4d6a-9d54-439c67902b92" (UID: "996acfd6-def0-4d6a-9d54-439c67902b92"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.821188 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-client-ca" (OuterVolumeSpecName: "client-ca") pod "996acfd6-def0-4d6a-9d54-439c67902b92" (UID: "996acfd6-def0-4d6a-9d54-439c67902b92"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.821290 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-config" (OuterVolumeSpecName: "config") pod "996acfd6-def0-4d6a-9d54-439c67902b92" (UID: "996acfd6-def0-4d6a-9d54-439c67902b92"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.825090 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/996acfd6-def0-4d6a-9d54-439c67902b92-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "996acfd6-def0-4d6a-9d54-439c67902b92" (UID: "996acfd6-def0-4d6a-9d54-439c67902b92"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.825158 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/996acfd6-def0-4d6a-9d54-439c67902b92-kube-api-access-sp27n" (OuterVolumeSpecName: "kube-api-access-sp27n") pod "996acfd6-def0-4d6a-9d54-439c67902b92" (UID: "996acfd6-def0-4d6a-9d54-439c67902b92"). InnerVolumeSpecName "kube-api-access-sp27n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.921961 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sp27n\" (UniqueName: \"kubernetes.io/projected/996acfd6-def0-4d6a-9d54-439c67902b92-kube-api-access-sp27n\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.922012 4707 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.922038 4707 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/996acfd6-def0-4d6a-9d54-439c67902b92-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.922058 4707 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:43 crc kubenswrapper[4707]: I1204 09:43:43.922089 4707 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/996acfd6-def0-4d6a-9d54-439c67902b92-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 04 09:43:44 crc kubenswrapper[4707]: I1204 09:43:44.064572 4707 generic.go:334] "Generic (PLEG): container finished" podID="996acfd6-def0-4d6a-9d54-439c67902b92" containerID="dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636" exitCode=0 Dec 04 09:43:44 crc kubenswrapper[4707]: I1204 09:43:44.064615 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" event={"ID":"996acfd6-def0-4d6a-9d54-439c67902b92","Type":"ContainerDied","Data":"dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636"} Dec 04 09:43:44 crc kubenswrapper[4707]: I1204 09:43:44.064641 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" Dec 04 09:43:44 crc kubenswrapper[4707]: I1204 09:43:44.064671 4707 scope.go:117] "RemoveContainer" containerID="dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636" Dec 04 09:43:44 crc kubenswrapper[4707]: I1204 09:43:44.064649 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947857f89-g755t" event={"ID":"996acfd6-def0-4d6a-9d54-439c67902b92","Type":"ContainerDied","Data":"6792fac8da3668a32bc3cfc2ab024d9d9094cca987d24e653857adaa6219f396"} Dec 04 09:43:44 crc kubenswrapper[4707]: I1204 09:43:44.084897 4707 scope.go:117] "RemoveContainer" containerID="dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636" Dec 04 09:43:44 crc kubenswrapper[4707]: E1204 09:43:44.085244 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636\": container with ID starting with dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636 not found: ID does not exist" containerID="dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636" Dec 04 09:43:44 crc kubenswrapper[4707]: I1204 09:43:44.085281 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636"} err="failed to get container status \"dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636\": rpc error: code = NotFound desc = could not find container \"dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636\": container with ID starting with dfcb01658c1520d7e4dd9810a2b3b544dfef4ccc6ffae36d5d4db0d8a23d3636 not found: ID does not exist" Dec 04 09:43:44 crc kubenswrapper[4707]: I1204 09:43:44.103483 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5947857f89-g755t"] Dec 04 09:43:44 crc kubenswrapper[4707]: I1204 09:43:44.106963 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5947857f89-g755t"] Dec 04 09:43:44 crc kubenswrapper[4707]: I1204 09:43:44.853093 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="996acfd6-def0-4d6a-9d54-439c67902b92" path="/var/lib/kubelet/pods/996acfd6-def0-4d6a-9d54-439c67902b92/volumes" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.152946 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-bb5d6679-4l626"] Dec 04 09:43:45 crc kubenswrapper[4707]: E1204 09:43:45.153613 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="996acfd6-def0-4d6a-9d54-439c67902b92" containerName="controller-manager" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.153728 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="996acfd6-def0-4d6a-9d54-439c67902b92" containerName="controller-manager" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.153912 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="996acfd6-def0-4d6a-9d54-439c67902b92" containerName="controller-manager" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.154521 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.156129 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.157146 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.157201 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.159573 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.159794 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.159838 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.165797 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-bb5d6679-4l626"] Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.169097 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.338418 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ede0432-b867-4c13-a914-5aec9349084e-config\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.338496 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6ede0432-b867-4c13-a914-5aec9349084e-proxy-ca-bundles\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.338522 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49b5x\" (UniqueName: \"kubernetes.io/projected/6ede0432-b867-4c13-a914-5aec9349084e-kube-api-access-49b5x\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.338548 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ede0432-b867-4c13-a914-5aec9349084e-serving-cert\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.338578 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/6ede0432-b867-4c13-a914-5aec9349084e-client-ca\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.439966 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ede0432-b867-4c13-a914-5aec9349084e-serving-cert\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.440045 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6ede0432-b867-4c13-a914-5aec9349084e-client-ca\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.440101 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ede0432-b867-4c13-a914-5aec9349084e-config\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.440146 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6ede0432-b867-4c13-a914-5aec9349084e-proxy-ca-bundles\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.440174 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49b5x\" (UniqueName: \"kubernetes.io/projected/6ede0432-b867-4c13-a914-5aec9349084e-kube-api-access-49b5x\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.441937 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6ede0432-b867-4c13-a914-5aec9349084e-client-ca\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.441937 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6ede0432-b867-4c13-a914-5aec9349084e-proxy-ca-bundles\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.442518 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ede0432-b867-4c13-a914-5aec9349084e-config\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc 
kubenswrapper[4707]: I1204 09:43:45.445616 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ede0432-b867-4c13-a914-5aec9349084e-serving-cert\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.456499 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49b5x\" (UniqueName: \"kubernetes.io/projected/6ede0432-b867-4c13-a914-5aec9349084e-kube-api-access-49b5x\") pod \"controller-manager-bb5d6679-4l626\" (UID: \"6ede0432-b867-4c13-a914-5aec9349084e\") " pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.472314 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:45 crc kubenswrapper[4707]: I1204 09:43:45.875797 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-bb5d6679-4l626"] Dec 04 09:43:46 crc kubenswrapper[4707]: I1204 09:43:46.076310 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" event={"ID":"6ede0432-b867-4c13-a914-5aec9349084e","Type":"ContainerStarted","Data":"a844cd8f2d02829edc2e7583ddaf852094c855e35c7f810e942b43c17719a36e"} Dec 04 09:43:46 crc kubenswrapper[4707]: I1204 09:43:46.076376 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" event={"ID":"6ede0432-b867-4c13-a914-5aec9349084e","Type":"ContainerStarted","Data":"9076f9ca8207667ecf0b370b3fcf6e354d7f74e2116a266fc79c418c909efddb"} Dec 04 09:43:46 crc kubenswrapper[4707]: I1204 09:43:46.077328 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:43:46 crc kubenswrapper[4707]: I1204 09:43:46.078444 4707 patch_prober.go:28] interesting pod/controller-manager-bb5d6679-4l626 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" start-of-body= Dec 04 09:43:46 crc kubenswrapper[4707]: I1204 09:43:46.078513 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" podUID="6ede0432-b867-4c13-a914-5aec9349084e" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" Dec 04 09:43:46 crc kubenswrapper[4707]: I1204 09:43:46.096712 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" podStartSLOduration=3.096688952 podStartE2EDuration="3.096688952s" podCreationTimestamp="2025-12-04 09:43:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:43:46.094915018 +0000 UTC m=+325.530737545" watchObservedRunningTime="2025-12-04 09:43:46.096688952 +0000 UTC m=+325.532511469" Dec 04 09:43:47 crc kubenswrapper[4707]: I1204 09:43:47.088568 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-controller-manager/controller-manager-bb5d6679-4l626" Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.891341 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lzd69"] Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.892548 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.909551 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lzd69"] Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.963065 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpfqk\" (UniqueName: \"kubernetes.io/projected/6db99fd7-2fda-4110-849e-93a84016c5d6-kube-api-access-qpfqk\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.963120 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6db99fd7-2fda-4110-849e-93a84016c5d6-bound-sa-token\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.963155 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.963245 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6db99fd7-2fda-4110-849e-93a84016c5d6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.963287 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6db99fd7-2fda-4110-849e-93a84016c5d6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.963434 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6db99fd7-2fda-4110-849e-93a84016c5d6-registry-tls\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.963524 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/6db99fd7-2fda-4110-849e-93a84016c5d6-registry-certificates\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.963575 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6db99fd7-2fda-4110-849e-93a84016c5d6-trusted-ca\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:19 crc kubenswrapper[4707]: I1204 09:44:19.985130 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.064680 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6db99fd7-2fda-4110-849e-93a84016c5d6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.064740 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6db99fd7-2fda-4110-849e-93a84016c5d6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.064787 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6db99fd7-2fda-4110-849e-93a84016c5d6-registry-tls\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.064820 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6db99fd7-2fda-4110-849e-93a84016c5d6-registry-certificates\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.064849 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6db99fd7-2fda-4110-849e-93a84016c5d6-trusted-ca\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.064881 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpfqk\" (UniqueName: \"kubernetes.io/projected/6db99fd7-2fda-4110-849e-93a84016c5d6-kube-api-access-qpfqk\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.064910 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6db99fd7-2fda-4110-849e-93a84016c5d6-bound-sa-token\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.065705 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6db99fd7-2fda-4110-849e-93a84016c5d6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.066228 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6db99fd7-2fda-4110-849e-93a84016c5d6-registry-certificates\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.066974 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6db99fd7-2fda-4110-849e-93a84016c5d6-trusted-ca\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.075301 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6db99fd7-2fda-4110-849e-93a84016c5d6-registry-tls\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.077051 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6db99fd7-2fda-4110-849e-93a84016c5d6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.084103 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6db99fd7-2fda-4110-849e-93a84016c5d6-bound-sa-token\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.085082 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpfqk\" (UniqueName: \"kubernetes.io/projected/6db99fd7-2fda-4110-849e-93a84016c5d6-kube-api-access-qpfqk\") pod \"image-registry-66df7c8f76-lzd69\" (UID: \"6db99fd7-2fda-4110-849e-93a84016c5d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.219522 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:20 crc kubenswrapper[4707]: I1204 09:44:20.631680 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lzd69"] Dec 04 09:44:21 crc kubenswrapper[4707]: I1204 09:44:21.251361 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" event={"ID":"6db99fd7-2fda-4110-849e-93a84016c5d6","Type":"ContainerStarted","Data":"63db0b6f57d10e2d513e404d47d16cdf4e2396d8d79bb4c076c6e2095bbc07b1"} Dec 04 09:44:21 crc kubenswrapper[4707]: I1204 09:44:21.251854 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" event={"ID":"6db99fd7-2fda-4110-849e-93a84016c5d6","Type":"ContainerStarted","Data":"44c7885250920865e64356d7d72d14b07cdecd2a87e2290ef142864328fd4d6b"} Dec 04 09:44:21 crc kubenswrapper[4707]: I1204 09:44:21.251884 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:21 crc kubenswrapper[4707]: I1204 09:44:21.271311 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" podStartSLOduration=2.271286895 podStartE2EDuration="2.271286895s" podCreationTimestamp="2025-12-04 09:44:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:44:21.269907332 +0000 UTC m=+360.705729859" watchObservedRunningTime="2025-12-04 09:44:21.271286895 +0000 UTC m=+360.707109402" Dec 04 09:44:30 crc kubenswrapper[4707]: I1204 09:44:30.817392 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:44:30 crc kubenswrapper[4707]: I1204 09:44:30.817873 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.866500 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gcf45"] Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.867577 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gcf45" podUID="aad5050f-90b9-4364-9dc7-c32892d674d0" containerName="registry-server" containerID="cri-o://6277faef785e7ec6a2aa2e75ad9ba784613777409692d91f378bbbbb51e5c4f7" gracePeriod=30 Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.878474 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qrqx7"] Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.878762 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qrqx7" podUID="2ffc2219-3702-4f09-9511-145919595de9" containerName="registry-server" containerID="cri-o://b31ff96d13f1b2d5ae54edc65697d5360d6d3ba6d2a5293f0b447f1a972dd21f" gracePeriod=30 
Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.890009 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ncmcs"] Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.890271 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" podUID="f0411515-ac32-4ad1-a956-ce737c8d0d75" containerName="marketplace-operator" containerID="cri-o://c980d1254f0d8fb4a796a610dcece477af02acf05b1cbbfc8515567d3a288818" gracePeriod=30 Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.891648 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-57t67"] Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.891806 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-57t67" podUID="dee25bc0-3766-43d6-8dde-8d316c48bd04" containerName="registry-server" containerID="cri-o://fba7bb099b62618e0a229b57697bac350adf20523b22e5cfd7106887383a966f" gracePeriod=30 Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.898376 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wnkkq"] Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.898852 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wnkkq" podUID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerName="registry-server" containerID="cri-o://df5fd45a3d9866f3669d1daada9ca9f1cceb0346d51b896f77002e671efaf8a0" gracePeriod=30 Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.914550 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8qmqt"] Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.915189 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.929618 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8qmqt"] Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.980868 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/860836b6-d7c9-4c56-9193-c4bbaeca659b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8qmqt\" (UID: \"860836b6-d7c9-4c56-9193-c4bbaeca659b\") " pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.980924 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/860836b6-d7c9-4c56-9193-c4bbaeca659b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8qmqt\" (UID: \"860836b6-d7c9-4c56-9193-c4bbaeca659b\") " pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:34 crc kubenswrapper[4707]: I1204 09:44:34.980944 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tgnh\" (UniqueName: \"kubernetes.io/projected/860836b6-d7c9-4c56-9193-c4bbaeca659b-kube-api-access-4tgnh\") pod \"marketplace-operator-79b997595-8qmqt\" (UID: \"860836b6-d7c9-4c56-9193-c4bbaeca659b\") " pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.082411 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/860836b6-d7c9-4c56-9193-c4bbaeca659b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8qmqt\" (UID: \"860836b6-d7c9-4c56-9193-c4bbaeca659b\") " pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.082460 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/860836b6-d7c9-4c56-9193-c4bbaeca659b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8qmqt\" (UID: \"860836b6-d7c9-4c56-9193-c4bbaeca659b\") " pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.082480 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tgnh\" (UniqueName: \"kubernetes.io/projected/860836b6-d7c9-4c56-9193-c4bbaeca659b-kube-api-access-4tgnh\") pod \"marketplace-operator-79b997595-8qmqt\" (UID: \"860836b6-d7c9-4c56-9193-c4bbaeca659b\") " pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.084217 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/860836b6-d7c9-4c56-9193-c4bbaeca659b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8qmqt\" (UID: \"860836b6-d7c9-4c56-9193-c4bbaeca659b\") " pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.090167 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/860836b6-d7c9-4c56-9193-c4bbaeca659b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8qmqt\" (UID: \"860836b6-d7c9-4c56-9193-c4bbaeca659b\") " pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.103467 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tgnh\" (UniqueName: \"kubernetes.io/projected/860836b6-d7c9-4c56-9193-c4bbaeca659b-kube-api-access-4tgnh\") pod \"marketplace-operator-79b997595-8qmqt\" (UID: \"860836b6-d7c9-4c56-9193-c4bbaeca659b\") " pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.243606 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.325200 4707 generic.go:334] "Generic (PLEG): container finished" podID="f0411515-ac32-4ad1-a956-ce737c8d0d75" containerID="c980d1254f0d8fb4a796a610dcece477af02acf05b1cbbfc8515567d3a288818" exitCode=0 Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.325289 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" event={"ID":"f0411515-ac32-4ad1-a956-ce737c8d0d75","Type":"ContainerDied","Data":"c980d1254f0d8fb4a796a610dcece477af02acf05b1cbbfc8515567d3a288818"} Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.325370 4707 scope.go:117] "RemoveContainer" containerID="5ea293cf21194305a675510f4b4c7e7f0097cc2919231a8170612e9c0145805f" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.329714 4707 generic.go:334] "Generic (PLEG): container finished" podID="2ffc2219-3702-4f09-9511-145919595de9" containerID="b31ff96d13f1b2d5ae54edc65697d5360d6d3ba6d2a5293f0b447f1a972dd21f" exitCode=0 Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.329830 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrqx7" event={"ID":"2ffc2219-3702-4f09-9511-145919595de9","Type":"ContainerDied","Data":"b31ff96d13f1b2d5ae54edc65697d5360d6d3ba6d2a5293f0b447f1a972dd21f"} Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.331946 4707 generic.go:334] "Generic (PLEG): container finished" podID="dee25bc0-3766-43d6-8dde-8d316c48bd04" containerID="fba7bb099b62618e0a229b57697bac350adf20523b22e5cfd7106887383a966f" exitCode=0 Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.332043 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57t67" event={"ID":"dee25bc0-3766-43d6-8dde-8d316c48bd04","Type":"ContainerDied","Data":"fba7bb099b62618e0a229b57697bac350adf20523b22e5cfd7106887383a966f"} Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.334773 4707 generic.go:334] "Generic (PLEG): container finished" podID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerID="df5fd45a3d9866f3669d1daada9ca9f1cceb0346d51b896f77002e671efaf8a0" exitCode=0 Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.334847 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnkkq" event={"ID":"579805ef-aec3-4ea7-b5af-bdf514c7eb1f","Type":"ContainerDied","Data":"df5fd45a3d9866f3669d1daada9ca9f1cceb0346d51b896f77002e671efaf8a0"} Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.336889 4707 generic.go:334] "Generic (PLEG): container finished" podID="aad5050f-90b9-4364-9dc7-c32892d674d0" 
containerID="6277faef785e7ec6a2aa2e75ad9ba784613777409692d91f378bbbbb51e5c4f7" exitCode=0 Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.336921 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcf45" event={"ID":"aad5050f-90b9-4364-9dc7-c32892d674d0","Type":"ContainerDied","Data":"6277faef785e7ec6a2aa2e75ad9ba784613777409692d91f378bbbbb51e5c4f7"} Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.336943 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcf45" event={"ID":"aad5050f-90b9-4364-9dc7-c32892d674d0","Type":"ContainerDied","Data":"3a6952e310aebcea7e41d7576c896aae1be2425ab6457b73017ca8e5c7a2e477"} Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.336958 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a6952e310aebcea7e41d7576c896aae1be2425ab6457b73017ca8e5c7a2e477" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.386558 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.395267 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.417374 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.433478 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.442411 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.486996 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-utilities\") pod \"dee25bc0-3766-43d6-8dde-8d316c48bd04\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487042 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-utilities\") pod \"aad5050f-90b9-4364-9dc7-c32892d674d0\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487089 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-trusted-ca\") pod \"f0411515-ac32-4ad1-a956-ce737c8d0d75\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487126 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-operator-metrics\") pod \"f0411515-ac32-4ad1-a956-ce737c8d0d75\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487150 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-utilities\") pod \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487179 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-catalog-content\") pod \"aad5050f-90b9-4364-9dc7-c32892d674d0\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487204 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-utilities\") pod \"2ffc2219-3702-4f09-9511-145919595de9\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487229 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48dnx\" (UniqueName: \"kubernetes.io/projected/aad5050f-90b9-4364-9dc7-c32892d674d0-kube-api-access-48dnx\") pod \"aad5050f-90b9-4364-9dc7-c32892d674d0\" (UID: \"aad5050f-90b9-4364-9dc7-c32892d674d0\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487258 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-catalog-content\") pod \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487290 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hdx7\" (UniqueName: \"kubernetes.io/projected/2ffc2219-3702-4f09-9511-145919595de9-kube-api-access-8hdx7\") 
pod \"2ffc2219-3702-4f09-9511-145919595de9\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487322 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68w7p\" (UniqueName: \"kubernetes.io/projected/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-kube-api-access-68w7p\") pod \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\" (UID: \"579805ef-aec3-4ea7-b5af-bdf514c7eb1f\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487365 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z99dc\" (UniqueName: \"kubernetes.io/projected/dee25bc0-3766-43d6-8dde-8d316c48bd04-kube-api-access-z99dc\") pod \"dee25bc0-3766-43d6-8dde-8d316c48bd04\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487399 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-catalog-content\") pod \"2ffc2219-3702-4f09-9511-145919595de9\" (UID: \"2ffc2219-3702-4f09-9511-145919595de9\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487450 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5jkd\" (UniqueName: \"kubernetes.io/projected/f0411515-ac32-4ad1-a956-ce737c8d0d75-kube-api-access-x5jkd\") pod \"f0411515-ac32-4ad1-a956-ce737c8d0d75\" (UID: \"f0411515-ac32-4ad1-a956-ce737c8d0d75\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.487486 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-catalog-content\") pod \"dee25bc0-3766-43d6-8dde-8d316c48bd04\" (UID: \"dee25bc0-3766-43d6-8dde-8d316c48bd04\") " Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.488019 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "f0411515-ac32-4ad1-a956-ce737c8d0d75" (UID: "f0411515-ac32-4ad1-a956-ce737c8d0d75"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.489147 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-utilities" (OuterVolumeSpecName: "utilities") pod "579805ef-aec3-4ea7-b5af-bdf514c7eb1f" (UID: "579805ef-aec3-4ea7-b5af-bdf514c7eb1f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.490963 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-utilities" (OuterVolumeSpecName: "utilities") pod "2ffc2219-3702-4f09-9511-145919595de9" (UID: "2ffc2219-3702-4f09-9511-145919595de9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.492097 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-utilities" (OuterVolumeSpecName: "utilities") pod "aad5050f-90b9-4364-9dc7-c32892d674d0" (UID: "aad5050f-90b9-4364-9dc7-c32892d674d0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.493371 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-utilities" (OuterVolumeSpecName: "utilities") pod "dee25bc0-3766-43d6-8dde-8d316c48bd04" (UID: "dee25bc0-3766-43d6-8dde-8d316c48bd04"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.494817 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ffc2219-3702-4f09-9511-145919595de9-kube-api-access-8hdx7" (OuterVolumeSpecName: "kube-api-access-8hdx7") pod "2ffc2219-3702-4f09-9511-145919595de9" (UID: "2ffc2219-3702-4f09-9511-145919595de9"). InnerVolumeSpecName "kube-api-access-8hdx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.496178 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0411515-ac32-4ad1-a956-ce737c8d0d75-kube-api-access-x5jkd" (OuterVolumeSpecName: "kube-api-access-x5jkd") pod "f0411515-ac32-4ad1-a956-ce737c8d0d75" (UID: "f0411515-ac32-4ad1-a956-ce737c8d0d75"). InnerVolumeSpecName "kube-api-access-x5jkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.496639 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dee25bc0-3766-43d6-8dde-8d316c48bd04-kube-api-access-z99dc" (OuterVolumeSpecName: "kube-api-access-z99dc") pod "dee25bc0-3766-43d6-8dde-8d316c48bd04" (UID: "dee25bc0-3766-43d6-8dde-8d316c48bd04"). InnerVolumeSpecName "kube-api-access-z99dc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.496808 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aad5050f-90b9-4364-9dc7-c32892d674d0-kube-api-access-48dnx" (OuterVolumeSpecName: "kube-api-access-48dnx") pod "aad5050f-90b9-4364-9dc7-c32892d674d0" (UID: "aad5050f-90b9-4364-9dc7-c32892d674d0"). InnerVolumeSpecName "kube-api-access-48dnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.497533 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-kube-api-access-68w7p" (OuterVolumeSpecName: "kube-api-access-68w7p") pod "579805ef-aec3-4ea7-b5af-bdf514c7eb1f" (UID: "579805ef-aec3-4ea7-b5af-bdf514c7eb1f"). InnerVolumeSpecName "kube-api-access-68w7p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.497694 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "f0411515-ac32-4ad1-a956-ce737c8d0d75" (UID: "f0411515-ac32-4ad1-a956-ce737c8d0d75"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.531003 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dee25bc0-3766-43d6-8dde-8d316c48bd04" (UID: "dee25bc0-3766-43d6-8dde-8d316c48bd04"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.547187 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aad5050f-90b9-4364-9dc7-c32892d674d0" (UID: "aad5050f-90b9-4364-9dc7-c32892d674d0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.565021 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2ffc2219-3702-4f09-9511-145919595de9" (UID: "2ffc2219-3702-4f09-9511-145919595de9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589191 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5jkd\" (UniqueName: \"kubernetes.io/projected/f0411515-ac32-4ad1-a956-ce737c8d0d75-kube-api-access-x5jkd\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589222 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589232 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dee25bc0-3766-43d6-8dde-8d316c48bd04-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589241 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589250 4707 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589258 4707 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0411515-ac32-4ad1-a956-ce737c8d0d75-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589268 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589277 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aad5050f-90b9-4364-9dc7-c32892d674d0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589286 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589293 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48dnx\" (UniqueName: \"kubernetes.io/projected/aad5050f-90b9-4364-9dc7-c32892d674d0-kube-api-access-48dnx\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589302 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hdx7\" (UniqueName: \"kubernetes.io/projected/2ffc2219-3702-4f09-9511-145919595de9-kube-api-access-8hdx7\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589311 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68w7p\" (UniqueName: \"kubernetes.io/projected/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-kube-api-access-68w7p\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589319 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z99dc\" (UniqueName: 
\"kubernetes.io/projected/dee25bc0-3766-43d6-8dde-8d316c48bd04-kube-api-access-z99dc\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.589327 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ffc2219-3702-4f09-9511-145919595de9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.611866 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "579805ef-aec3-4ea7-b5af-bdf514c7eb1f" (UID: "579805ef-aec3-4ea7-b5af-bdf514c7eb1f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.691023 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/579805ef-aec3-4ea7-b5af-bdf514c7eb1f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:44:35 crc kubenswrapper[4707]: I1204 09:44:35.761031 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8qmqt"] Dec 04 09:44:35 crc kubenswrapper[4707]: W1204 09:44:35.767501 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod860836b6_d7c9_4c56_9193_c4bbaeca659b.slice/crio-e7244dd647d2713c9b1faac48960c487fbd48ce8ea58e44995d009b2742216ec WatchSource:0}: Error finding container e7244dd647d2713c9b1faac48960c487fbd48ce8ea58e44995d009b2742216ec: Status 404 returned error can't find the container with id e7244dd647d2713c9b1faac48960c487fbd48ce8ea58e44995d009b2742216ec Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.343644 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" event={"ID":"860836b6-d7c9-4c56-9193-c4bbaeca659b","Type":"ContainerStarted","Data":"2d96862d4bab181bb859dc132f1dba12ae36b428d90d149ee9df4768e17dd4a9"} Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.343689 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" event={"ID":"860836b6-d7c9-4c56-9193-c4bbaeca659b","Type":"ContainerStarted","Data":"e7244dd647d2713c9b1faac48960c487fbd48ce8ea58e44995d009b2742216ec"} Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.344234 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.347165 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.348361 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57t67" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.348593 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57t67" event={"ID":"dee25bc0-3766-43d6-8dde-8d316c48bd04","Type":"ContainerDied","Data":"4c9fecffa592a2c4290ac9cebeb8f8d472207e3b4fb61e18e50599b3f7e41db7"} Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.348657 4707 scope.go:117] "RemoveContainer" containerID="fba7bb099b62618e0a229b57697bac350adf20523b22e5cfd7106887383a966f" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.351984 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wnkkq" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.352032 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wnkkq" event={"ID":"579805ef-aec3-4ea7-b5af-bdf514c7eb1f","Type":"ContainerDied","Data":"d73e742e0c49807694b5b1340419b92b6d824c9db2d7d8d696f261fba97d4835"} Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.353790 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.353879 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ncmcs" event={"ID":"f0411515-ac32-4ad1-a956-ce737c8d0d75","Type":"ContainerDied","Data":"acb5917138225493b6dc86fff796d9f3bd60c524fcab69f88ca29a5f1bc19238"} Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.359383 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-8qmqt" podStartSLOduration=2.359285541 podStartE2EDuration="2.359285541s" podCreationTimestamp="2025-12-04 09:44:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:44:36.358208468 +0000 UTC m=+375.794030995" watchObservedRunningTime="2025-12-04 09:44:36.359285541 +0000 UTC m=+375.795108058" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.364765 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gcf45" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.365922 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qrqx7" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.365914 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrqx7" event={"ID":"2ffc2219-3702-4f09-9511-145919595de9","Type":"ContainerDied","Data":"861beda45abce154c5955d4ad718c75a36b2a3e3d6d0ddb2c0fcd8ea272c59bd"} Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.368734 4707 scope.go:117] "RemoveContainer" containerID="24716a6a57f76f1ad09c8ab49ff658f478a0cf7e4d43c4aae360235bb2697abe" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.408878 4707 scope.go:117] "RemoveContainer" containerID="b70f6efeca4b03e6e62398fd2c26c3dd7ac0ce54b2b6106db3c26828ad2f7d0c" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.420095 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ncmcs"] Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.425843 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ncmcs"] Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.434407 4707 scope.go:117] "RemoveContainer" containerID="df5fd45a3d9866f3669d1daada9ca9f1cceb0346d51b896f77002e671efaf8a0" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.434529 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gcf45"] Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.441296 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gcf45"] Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.451379 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wnkkq"] Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.460124 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wnkkq"] Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.461795 4707 scope.go:117] "RemoveContainer" containerID="db8b70e59fb9d66a9f4832e51aa6134572c47ca7aa524a82d38f71924f66d909" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.467776 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-57t67"] Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.470581 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-57t67"] Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.474595 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qrqx7"] Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.477346 4707 scope.go:117] "RemoveContainer" containerID="b42536a3c54bcff747a011758f65f926a400d3165d9ec1cb05b63244bb603518" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.478146 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qrqx7"] Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.492303 4707 scope.go:117] "RemoveContainer" containerID="c980d1254f0d8fb4a796a610dcece477af02acf05b1cbbfc8515567d3a288818" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.506574 4707 scope.go:117] "RemoveContainer" containerID="b31ff96d13f1b2d5ae54edc65697d5360d6d3ba6d2a5293f0b447f1a972dd21f" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.519124 4707 scope.go:117] "RemoveContainer" 
containerID="5a196f573fe2a853e1fe68a57e6c5def7c110fc93a805fce66c61316223c7a8f" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.532755 4707 scope.go:117] "RemoveContainer" containerID="f71212486bf82a515b68ed0139d0f6904f1aad9af266d6294399968a198b4be3" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.851489 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ffc2219-3702-4f09-9511-145919595de9" path="/var/lib/kubelet/pods/2ffc2219-3702-4f09-9511-145919595de9/volumes" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.852295 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" path="/var/lib/kubelet/pods/579805ef-aec3-4ea7-b5af-bdf514c7eb1f/volumes" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.852930 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aad5050f-90b9-4364-9dc7-c32892d674d0" path="/var/lib/kubelet/pods/aad5050f-90b9-4364-9dc7-c32892d674d0/volumes" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.853903 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dee25bc0-3766-43d6-8dde-8d316c48bd04" path="/var/lib/kubelet/pods/dee25bc0-3766-43d6-8dde-8d316c48bd04/volumes" Dec 04 09:44:36 crc kubenswrapper[4707]: I1204 09:44:36.854635 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0411515-ac32-4ad1-a956-ce737c8d0d75" path="/var/lib/kubelet/pods/f0411515-ac32-4ad1-a956-ce737c8d0d75/volumes" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.083542 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ngcrl"] Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.084004 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dee25bc0-3766-43d6-8dde-8d316c48bd04" containerName="extract-utilities" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.084089 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="dee25bc0-3766-43d6-8dde-8d316c48bd04" containerName="extract-utilities" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.084157 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerName="extract-content" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.084225 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerName="extract-content" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.084303 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0411515-ac32-4ad1-a956-ce737c8d0d75" containerName="marketplace-operator" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.084396 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0411515-ac32-4ad1-a956-ce737c8d0d75" containerName="marketplace-operator" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.084475 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dee25bc0-3766-43d6-8dde-8d316c48bd04" containerName="extract-content" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.084549 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="dee25bc0-3766-43d6-8dde-8d316c48bd04" containerName="extract-content" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.084630 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ffc2219-3702-4f09-9511-145919595de9" containerName="registry-server" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.084707 4707 
state_mem.go:107] "Deleted CPUSet assignment" podUID="2ffc2219-3702-4f09-9511-145919595de9" containerName="registry-server" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.084779 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0411515-ac32-4ad1-a956-ce737c8d0d75" containerName="marketplace-operator" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.084846 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0411515-ac32-4ad1-a956-ce737c8d0d75" containerName="marketplace-operator" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.084923 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerName="registry-server" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.084983 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerName="registry-server" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.085042 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ffc2219-3702-4f09-9511-145919595de9" containerName="extract-content" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.085126 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ffc2219-3702-4f09-9511-145919595de9" containerName="extract-content" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.085208 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerName="extract-utilities" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.085280 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerName="extract-utilities" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.085370 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ffc2219-3702-4f09-9511-145919595de9" containerName="extract-utilities" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.085436 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ffc2219-3702-4f09-9511-145919595de9" containerName="extract-utilities" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.085493 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aad5050f-90b9-4364-9dc7-c32892d674d0" containerName="extract-utilities" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.085569 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="aad5050f-90b9-4364-9dc7-c32892d674d0" containerName="extract-utilities" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.085793 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aad5050f-90b9-4364-9dc7-c32892d674d0" containerName="extract-content" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.085882 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="aad5050f-90b9-4364-9dc7-c32892d674d0" containerName="extract-content" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.085944 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dee25bc0-3766-43d6-8dde-8d316c48bd04" containerName="registry-server" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.085995 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="dee25bc0-3766-43d6-8dde-8d316c48bd04" containerName="registry-server" Dec 04 09:44:37 crc kubenswrapper[4707]: E1204 09:44:37.086051 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aad5050f-90b9-4364-9dc7-c32892d674d0" containerName="registry-server" Dec 04 09:44:37 crc 
kubenswrapper[4707]: I1204 09:44:37.086139 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="aad5050f-90b9-4364-9dc7-c32892d674d0" containerName="registry-server" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.086303 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0411515-ac32-4ad1-a956-ce737c8d0d75" containerName="marketplace-operator" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.086422 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0411515-ac32-4ad1-a956-ce737c8d0d75" containerName="marketplace-operator" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.086480 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="579805ef-aec3-4ea7-b5af-bdf514c7eb1f" containerName="registry-server" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.086540 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="dee25bc0-3766-43d6-8dde-8d316c48bd04" containerName="registry-server" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.086592 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ffc2219-3702-4f09-9511-145919595de9" containerName="registry-server" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.086651 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="aad5050f-90b9-4364-9dc7-c32892d674d0" containerName="registry-server" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.087579 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.089829 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.095217 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ngcrl"] Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.112073 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxrts\" (UniqueName: \"kubernetes.io/projected/2a03b094-f535-4b09-9bef-016450d98586-kube-api-access-vxrts\") pod \"redhat-marketplace-ngcrl\" (UID: \"2a03b094-f535-4b09-9bef-016450d98586\") " pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.112161 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a03b094-f535-4b09-9bef-016450d98586-utilities\") pod \"redhat-marketplace-ngcrl\" (UID: \"2a03b094-f535-4b09-9bef-016450d98586\") " pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.112199 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a03b094-f535-4b09-9bef-016450d98586-catalog-content\") pod \"redhat-marketplace-ngcrl\" (UID: \"2a03b094-f535-4b09-9bef-016450d98586\") " pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.212928 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a03b094-f535-4b09-9bef-016450d98586-utilities\") pod \"redhat-marketplace-ngcrl\" (UID: \"2a03b094-f535-4b09-9bef-016450d98586\") " 
pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.212983 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a03b094-f535-4b09-9bef-016450d98586-catalog-content\") pod \"redhat-marketplace-ngcrl\" (UID: \"2a03b094-f535-4b09-9bef-016450d98586\") " pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.213045 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxrts\" (UniqueName: \"kubernetes.io/projected/2a03b094-f535-4b09-9bef-016450d98586-kube-api-access-vxrts\") pod \"redhat-marketplace-ngcrl\" (UID: \"2a03b094-f535-4b09-9bef-016450d98586\") " pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.213774 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a03b094-f535-4b09-9bef-016450d98586-catalog-content\") pod \"redhat-marketplace-ngcrl\" (UID: \"2a03b094-f535-4b09-9bef-016450d98586\") " pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.214900 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a03b094-f535-4b09-9bef-016450d98586-utilities\") pod \"redhat-marketplace-ngcrl\" (UID: \"2a03b094-f535-4b09-9bef-016450d98586\") " pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.230398 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxrts\" (UniqueName: \"kubernetes.io/projected/2a03b094-f535-4b09-9bef-016450d98586-kube-api-access-vxrts\") pod \"redhat-marketplace-ngcrl\" (UID: \"2a03b094-f535-4b09-9bef-016450d98586\") " pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.281447 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pcrk2"] Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.282504 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.286594 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.289888 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pcrk2"] Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.314150 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ff4a232-1027-4cb9-a021-9b320f41b041-catalog-content\") pod \"redhat-operators-pcrk2\" (UID: \"5ff4a232-1027-4cb9-a021-9b320f41b041\") " pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.314239 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncbzz\" (UniqueName: \"kubernetes.io/projected/5ff4a232-1027-4cb9-a021-9b320f41b041-kube-api-access-ncbzz\") pod \"redhat-operators-pcrk2\" (UID: \"5ff4a232-1027-4cb9-a021-9b320f41b041\") " pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.314290 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ff4a232-1027-4cb9-a021-9b320f41b041-utilities\") pod \"redhat-operators-pcrk2\" (UID: \"5ff4a232-1027-4cb9-a021-9b320f41b041\") " pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.411615 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.415555 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ff4a232-1027-4cb9-a021-9b320f41b041-catalog-content\") pod \"redhat-operators-pcrk2\" (UID: \"5ff4a232-1027-4cb9-a021-9b320f41b041\") " pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.415638 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncbzz\" (UniqueName: \"kubernetes.io/projected/5ff4a232-1027-4cb9-a021-9b320f41b041-kube-api-access-ncbzz\") pod \"redhat-operators-pcrk2\" (UID: \"5ff4a232-1027-4cb9-a021-9b320f41b041\") " pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.415743 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ff4a232-1027-4cb9-a021-9b320f41b041-utilities\") pod \"redhat-operators-pcrk2\" (UID: \"5ff4a232-1027-4cb9-a021-9b320f41b041\") " pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.416227 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ff4a232-1027-4cb9-a021-9b320f41b041-utilities\") pod \"redhat-operators-pcrk2\" (UID: \"5ff4a232-1027-4cb9-a021-9b320f41b041\") " pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.416648 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ff4a232-1027-4cb9-a021-9b320f41b041-catalog-content\") pod \"redhat-operators-pcrk2\" (UID: \"5ff4a232-1027-4cb9-a021-9b320f41b041\") " pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.433252 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncbzz\" (UniqueName: \"kubernetes.io/projected/5ff4a232-1027-4cb9-a021-9b320f41b041-kube-api-access-ncbzz\") pod \"redhat-operators-pcrk2\" (UID: \"5ff4a232-1027-4cb9-a021-9b320f41b041\") " pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.602438 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.793333 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ngcrl"] Dec 04 09:44:37 crc kubenswrapper[4707]: I1204 09:44:37.972542 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pcrk2"] Dec 04 09:44:37 crc kubenswrapper[4707]: W1204 09:44:37.996096 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ff4a232_1027_4cb9_a021_9b320f41b041.slice/crio-eb5d901bc777c965b65d5b0929b20da890e97e4ece48cb58d20b8f290183f6e7 WatchSource:0}: Error finding container eb5d901bc777c965b65d5b0929b20da890e97e4ece48cb58d20b8f290183f6e7: Status 404 returned error can't find the container with id eb5d901bc777c965b65d5b0929b20da890e97e4ece48cb58d20b8f290183f6e7 Dec 04 09:44:38 crc kubenswrapper[4707]: I1204 09:44:38.379432 4707 generic.go:334] "Generic (PLEG): container finished" podID="5ff4a232-1027-4cb9-a021-9b320f41b041" containerID="d7d59df93779524263725dd6677b7c5d75114a34b17fadd28424d5d9bf33182c" exitCode=0 Dec 04 09:44:38 crc kubenswrapper[4707]: I1204 09:44:38.379534 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcrk2" event={"ID":"5ff4a232-1027-4cb9-a021-9b320f41b041","Type":"ContainerDied","Data":"d7d59df93779524263725dd6677b7c5d75114a34b17fadd28424d5d9bf33182c"} Dec 04 09:44:38 crc kubenswrapper[4707]: I1204 09:44:38.379798 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcrk2" event={"ID":"5ff4a232-1027-4cb9-a021-9b320f41b041","Type":"ContainerStarted","Data":"eb5d901bc777c965b65d5b0929b20da890e97e4ece48cb58d20b8f290183f6e7"} Dec 04 09:44:38 crc kubenswrapper[4707]: I1204 09:44:38.381645 4707 generic.go:334] "Generic (PLEG): container finished" podID="2a03b094-f535-4b09-9bef-016450d98586" containerID="49763f172236e47e2f3e2caaad0682680a0cab0056f927037e847d2db19d3347" exitCode=0 Dec 04 09:44:38 crc kubenswrapper[4707]: I1204 09:44:38.381736 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ngcrl" event={"ID":"2a03b094-f535-4b09-9bef-016450d98586","Type":"ContainerDied","Data":"49763f172236e47e2f3e2caaad0682680a0cab0056f927037e847d2db19d3347"} Dec 04 09:44:38 crc kubenswrapper[4707]: I1204 09:44:38.381772 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ngcrl" event={"ID":"2a03b094-f535-4b09-9bef-016450d98586","Type":"ContainerStarted","Data":"0b7ac0004ac89264f91906fe9eca59e52fe62a1f0ffb7719bde5a00928515ca9"} Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.388410 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcrk2" event={"ID":"5ff4a232-1027-4cb9-a021-9b320f41b041","Type":"ContainerStarted","Data":"656e777d6323e7917da7508e05e6f42947b700cd4ad14c4fe19da7ce26619cc1"} Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.393171 4707 generic.go:334] "Generic (PLEG): container finished" podID="2a03b094-f535-4b09-9bef-016450d98586" containerID="f8d9832c6e858a605bdeab4e38a2634332833dee5a454b0f79971251c2fd8ccb" exitCode=0 Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.393209 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ngcrl" 
event={"ID":"2a03b094-f535-4b09-9bef-016450d98586","Type":"ContainerDied","Data":"f8d9832c6e858a605bdeab4e38a2634332833dee5a454b0f79971251c2fd8ccb"} Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.486804 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-flfxl"] Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.488009 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.496091 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-flfxl"] Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.496265 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.554642 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae1d3840-1144-4905-8415-8817aa67d299-utilities\") pod \"certified-operators-flfxl\" (UID: \"ae1d3840-1144-4905-8415-8817aa67d299\") " pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.554714 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhh54\" (UniqueName: \"kubernetes.io/projected/ae1d3840-1144-4905-8415-8817aa67d299-kube-api-access-jhh54\") pod \"certified-operators-flfxl\" (UID: \"ae1d3840-1144-4905-8415-8817aa67d299\") " pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.554763 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae1d3840-1144-4905-8415-8817aa67d299-catalog-content\") pod \"certified-operators-flfxl\" (UID: \"ae1d3840-1144-4905-8415-8817aa67d299\") " pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.656084 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae1d3840-1144-4905-8415-8817aa67d299-utilities\") pod \"certified-operators-flfxl\" (UID: \"ae1d3840-1144-4905-8415-8817aa67d299\") " pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.656129 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhh54\" (UniqueName: \"kubernetes.io/projected/ae1d3840-1144-4905-8415-8817aa67d299-kube-api-access-jhh54\") pod \"certified-operators-flfxl\" (UID: \"ae1d3840-1144-4905-8415-8817aa67d299\") " pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.656150 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae1d3840-1144-4905-8415-8817aa67d299-catalog-content\") pod \"certified-operators-flfxl\" (UID: \"ae1d3840-1144-4905-8415-8817aa67d299\") " pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.656596 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/ae1d3840-1144-4905-8415-8817aa67d299-catalog-content\") pod \"certified-operators-flfxl\" (UID: \"ae1d3840-1144-4905-8415-8817aa67d299\") " pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.656644 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae1d3840-1144-4905-8415-8817aa67d299-utilities\") pod \"certified-operators-flfxl\" (UID: \"ae1d3840-1144-4905-8415-8817aa67d299\") " pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.675115 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhh54\" (UniqueName: \"kubernetes.io/projected/ae1d3840-1144-4905-8415-8817aa67d299-kube-api-access-jhh54\") pod \"certified-operators-flfxl\" (UID: \"ae1d3840-1144-4905-8415-8817aa67d299\") " pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.681667 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xbrw2"] Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.682716 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.684948 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.690422 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xbrw2"] Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.757761 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn8np\" (UniqueName: \"kubernetes.io/projected/ad0fabbb-7625-4520-8298-8379635bb03c-kube-api-access-cn8np\") pod \"community-operators-xbrw2\" (UID: \"ad0fabbb-7625-4520-8298-8379635bb03c\") " pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.757798 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad0fabbb-7625-4520-8298-8379635bb03c-utilities\") pod \"community-operators-xbrw2\" (UID: \"ad0fabbb-7625-4520-8298-8379635bb03c\") " pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.757820 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad0fabbb-7625-4520-8298-8379635bb03c-catalog-content\") pod \"community-operators-xbrw2\" (UID: \"ad0fabbb-7625-4520-8298-8379635bb03c\") " pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.807552 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.859024 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn8np\" (UniqueName: \"kubernetes.io/projected/ad0fabbb-7625-4520-8298-8379635bb03c-kube-api-access-cn8np\") pod \"community-operators-xbrw2\" (UID: \"ad0fabbb-7625-4520-8298-8379635bb03c\") " pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.859090 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad0fabbb-7625-4520-8298-8379635bb03c-utilities\") pod \"community-operators-xbrw2\" (UID: \"ad0fabbb-7625-4520-8298-8379635bb03c\") " pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.859121 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad0fabbb-7625-4520-8298-8379635bb03c-catalog-content\") pod \"community-operators-xbrw2\" (UID: \"ad0fabbb-7625-4520-8298-8379635bb03c\") " pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.859794 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad0fabbb-7625-4520-8298-8379635bb03c-catalog-content\") pod \"community-operators-xbrw2\" (UID: \"ad0fabbb-7625-4520-8298-8379635bb03c\") " pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.859817 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad0fabbb-7625-4520-8298-8379635bb03c-utilities\") pod \"community-operators-xbrw2\" (UID: \"ad0fabbb-7625-4520-8298-8379635bb03c\") " pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:39 crc kubenswrapper[4707]: I1204 09:44:39.886971 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn8np\" (UniqueName: \"kubernetes.io/projected/ad0fabbb-7625-4520-8298-8379635bb03c-kube-api-access-cn8np\") pod \"community-operators-xbrw2\" (UID: \"ad0fabbb-7625-4520-8298-8379635bb03c\") " pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.049397 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.199927 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-flfxl"] Dec 04 09:44:40 crc kubenswrapper[4707]: W1204 09:44:40.209379 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae1d3840_1144_4905_8415_8817aa67d299.slice/crio-367fc0f8fc808b2463df1548fc3e4caa32380de671c36596c4a9bccf9aa22aed WatchSource:0}: Error finding container 367fc0f8fc808b2463df1548fc3e4caa32380de671c36596c4a9bccf9aa22aed: Status 404 returned error can't find the container with id 367fc0f8fc808b2463df1548fc3e4caa32380de671c36596c4a9bccf9aa22aed Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.234044 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-lzd69" Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.302369 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n74dl"] Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.404925 4707 generic.go:334] "Generic (PLEG): container finished" podID="5ff4a232-1027-4cb9-a021-9b320f41b041" containerID="656e777d6323e7917da7508e05e6f42947b700cd4ad14c4fe19da7ce26619cc1" exitCode=0 Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.405979 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcrk2" event={"ID":"5ff4a232-1027-4cb9-a021-9b320f41b041","Type":"ContainerDied","Data":"656e777d6323e7917da7508e05e6f42947b700cd4ad14c4fe19da7ce26619cc1"} Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.411892 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ngcrl" event={"ID":"2a03b094-f535-4b09-9bef-016450d98586","Type":"ContainerStarted","Data":"7aaf8823cc7ff0e336df8309bb4a31e77d2a6dfb4304d9e9c4d022fb6988a355"} Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.414813 4707 generic.go:334] "Generic (PLEG): container finished" podID="ae1d3840-1144-4905-8415-8817aa67d299" containerID="dee99debff58b227f5f7faa81975ea2b2e0583d24dfb0afcc2552b41cb14ddc0" exitCode=0 Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.414861 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flfxl" event={"ID":"ae1d3840-1144-4905-8415-8817aa67d299","Type":"ContainerDied","Data":"dee99debff58b227f5f7faa81975ea2b2e0583d24dfb0afcc2552b41cb14ddc0"} Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.414910 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flfxl" event={"ID":"ae1d3840-1144-4905-8415-8817aa67d299","Type":"ContainerStarted","Data":"367fc0f8fc808b2463df1548fc3e4caa32380de671c36596c4a9bccf9aa22aed"} Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.440588 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xbrw2"] Dec 04 09:44:40 crc kubenswrapper[4707]: W1204 09:44:40.444449 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad0fabbb_7625_4520_8298_8379635bb03c.slice/crio-ad47cfb7cbd8961b80693c3a321940659ceeb90613f5fa4844d49f2670b31798 WatchSource:0}: Error finding container 
ad47cfb7cbd8961b80693c3a321940659ceeb90613f5fa4844d49f2670b31798: Status 404 returned error can't find the container with id ad47cfb7cbd8961b80693c3a321940659ceeb90613f5fa4844d49f2670b31798 Dec 04 09:44:40 crc kubenswrapper[4707]: I1204 09:44:40.498108 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ngcrl" podStartSLOduration=2.013407641 podStartE2EDuration="3.49808871s" podCreationTimestamp="2025-12-04 09:44:37 +0000 UTC" firstStartedPulling="2025-12-04 09:44:38.384107841 +0000 UTC m=+377.819930348" lastFinishedPulling="2025-12-04 09:44:39.86878891 +0000 UTC m=+379.304611417" observedRunningTime="2025-12-04 09:44:40.495718237 +0000 UTC m=+379.931540774" watchObservedRunningTime="2025-12-04 09:44:40.49808871 +0000 UTC m=+379.933911217" Dec 04 09:44:41 crc kubenswrapper[4707]: I1204 09:44:41.433363 4707 generic.go:334] "Generic (PLEG): container finished" podID="ad0fabbb-7625-4520-8298-8379635bb03c" containerID="4f2fb95723ec2331a2d86dff3d0fb097fc4524f4d84a6e9469de19d356dd6d01" exitCode=0 Dec 04 09:44:41 crc kubenswrapper[4707]: I1204 09:44:41.433461 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xbrw2" event={"ID":"ad0fabbb-7625-4520-8298-8379635bb03c","Type":"ContainerDied","Data":"4f2fb95723ec2331a2d86dff3d0fb097fc4524f4d84a6e9469de19d356dd6d01"} Dec 04 09:44:41 crc kubenswrapper[4707]: I1204 09:44:41.433735 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xbrw2" event={"ID":"ad0fabbb-7625-4520-8298-8379635bb03c","Type":"ContainerStarted","Data":"ad47cfb7cbd8961b80693c3a321940659ceeb90613f5fa4844d49f2670b31798"} Dec 04 09:44:41 crc kubenswrapper[4707]: I1204 09:44:41.436644 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flfxl" event={"ID":"ae1d3840-1144-4905-8415-8817aa67d299","Type":"ContainerStarted","Data":"48ce5e0f076b1eefb4d065c1815759648bfa461036227d2ee4e2ed746ecad010"} Dec 04 09:44:41 crc kubenswrapper[4707]: I1204 09:44:41.441912 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcrk2" event={"ID":"5ff4a232-1027-4cb9-a021-9b320f41b041","Type":"ContainerStarted","Data":"13ae7de5e165672332a0d6d492f030f1b05d2b82f4c5979a22827d5281b8271c"} Dec 04 09:44:41 crc kubenswrapper[4707]: I1204 09:44:41.489123 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pcrk2" podStartSLOduration=2.072027506 podStartE2EDuration="4.489107648s" podCreationTimestamp="2025-12-04 09:44:37 +0000 UTC" firstStartedPulling="2025-12-04 09:44:38.380632973 +0000 UTC m=+377.816455480" lastFinishedPulling="2025-12-04 09:44:40.797713115 +0000 UTC m=+380.233535622" observedRunningTime="2025-12-04 09:44:41.487834728 +0000 UTC m=+380.923657245" watchObservedRunningTime="2025-12-04 09:44:41.489107648 +0000 UTC m=+380.924930155" Dec 04 09:44:42 crc kubenswrapper[4707]: I1204 09:44:42.451841 4707 generic.go:334] "Generic (PLEG): container finished" podID="ad0fabbb-7625-4520-8298-8379635bb03c" containerID="0f69434d3df519ba4ead5026d8b92a3c537f1a5225b9a87f38d9cd62e5135e6c" exitCode=0 Dec 04 09:44:42 crc kubenswrapper[4707]: I1204 09:44:42.452851 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xbrw2" 
event={"ID":"ad0fabbb-7625-4520-8298-8379635bb03c","Type":"ContainerDied","Data":"0f69434d3df519ba4ead5026d8b92a3c537f1a5225b9a87f38d9cd62e5135e6c"} Dec 04 09:44:42 crc kubenswrapper[4707]: I1204 09:44:42.455530 4707 generic.go:334] "Generic (PLEG): container finished" podID="ae1d3840-1144-4905-8415-8817aa67d299" containerID="48ce5e0f076b1eefb4d065c1815759648bfa461036227d2ee4e2ed746ecad010" exitCode=0 Dec 04 09:44:42 crc kubenswrapper[4707]: I1204 09:44:42.456163 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flfxl" event={"ID":"ae1d3840-1144-4905-8415-8817aa67d299","Type":"ContainerDied","Data":"48ce5e0f076b1eefb4d065c1815759648bfa461036227d2ee4e2ed746ecad010"} Dec 04 09:44:43 crc kubenswrapper[4707]: I1204 09:44:43.464116 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xbrw2" event={"ID":"ad0fabbb-7625-4520-8298-8379635bb03c","Type":"ContainerStarted","Data":"83463140a7055695dcab0a14806d39ecdfe984120e904c87ffdeb05f6c9671b5"} Dec 04 09:44:43 crc kubenswrapper[4707]: I1204 09:44:43.488156 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xbrw2" podStartSLOduration=3.042674984 podStartE2EDuration="4.488137629s" podCreationTimestamp="2025-12-04 09:44:39 +0000 UTC" firstStartedPulling="2025-12-04 09:44:41.43943334 +0000 UTC m=+380.875255847" lastFinishedPulling="2025-12-04 09:44:42.884895985 +0000 UTC m=+382.320718492" observedRunningTime="2025-12-04 09:44:43.48298709 +0000 UTC m=+382.918809597" watchObservedRunningTime="2025-12-04 09:44:43.488137629 +0000 UTC m=+382.923960136" Dec 04 09:44:45 crc kubenswrapper[4707]: I1204 09:44:45.479788 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-flfxl" event={"ID":"ae1d3840-1144-4905-8415-8817aa67d299","Type":"ContainerStarted","Data":"73cd7201216ac46e4fddf620ee580c8848d0aa8dbd7b52fc522b1d2a44d9ee9a"} Dec 04 09:44:45 crc kubenswrapper[4707]: I1204 09:44:45.503435 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-flfxl" podStartSLOduration=3.864248266 podStartE2EDuration="6.503414173s" podCreationTimestamp="2025-12-04 09:44:39 +0000 UTC" firstStartedPulling="2025-12-04 09:44:40.416154144 +0000 UTC m=+379.851976651" lastFinishedPulling="2025-12-04 09:44:43.055320051 +0000 UTC m=+382.491142558" observedRunningTime="2025-12-04 09:44:45.498824931 +0000 UTC m=+384.934647438" watchObservedRunningTime="2025-12-04 09:44:45.503414173 +0000 UTC m=+384.939236680" Dec 04 09:44:47 crc kubenswrapper[4707]: I1204 09:44:47.412761 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:47 crc kubenswrapper[4707]: I1204 09:44:47.413906 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:47 crc kubenswrapper[4707]: I1204 09:44:47.461219 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:47 crc kubenswrapper[4707]: I1204 09:44:47.527120 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ngcrl" Dec 04 09:44:47 crc kubenswrapper[4707]: I1204 09:44:47.602875 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:47 crc kubenswrapper[4707]: I1204 09:44:47.603213 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:47 crc kubenswrapper[4707]: I1204 09:44:47.644596 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:48 crc kubenswrapper[4707]: I1204 09:44:48.539364 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pcrk2" Dec 04 09:44:49 crc kubenswrapper[4707]: I1204 09:44:49.808676 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:49 crc kubenswrapper[4707]: I1204 09:44:49.809721 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:49 crc kubenswrapper[4707]: I1204 09:44:49.850889 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:44:50 crc kubenswrapper[4707]: I1204 09:44:50.050167 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:50 crc kubenswrapper[4707]: I1204 09:44:50.050247 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:50 crc kubenswrapper[4707]: I1204 09:44:50.086422 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:50 crc kubenswrapper[4707]: I1204 09:44:50.553886 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xbrw2" Dec 04 09:44:50 crc kubenswrapper[4707]: I1204 09:44:50.556693 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-flfxl" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.164398 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7"] Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.165802 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.168603 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.174759 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.178034 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7"] Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.346194 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4510910d-24a1-4530-b5a0-20d0a3be3570-secret-volume\") pod \"collect-profiles-29414025-8qcs7\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.346250 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4510910d-24a1-4530-b5a0-20d0a3be3570-config-volume\") pod \"collect-profiles-29414025-8qcs7\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.346366 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dmjm\" (UniqueName: \"kubernetes.io/projected/4510910d-24a1-4530-b5a0-20d0a3be3570-kube-api-access-7dmjm\") pod \"collect-profiles-29414025-8qcs7\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.447903 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4510910d-24a1-4530-b5a0-20d0a3be3570-secret-volume\") pod \"collect-profiles-29414025-8qcs7\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.447951 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4510910d-24a1-4530-b5a0-20d0a3be3570-config-volume\") pod \"collect-profiles-29414025-8qcs7\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.447999 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dmjm\" (UniqueName: \"kubernetes.io/projected/4510910d-24a1-4530-b5a0-20d0a3be3570-kube-api-access-7dmjm\") pod \"collect-profiles-29414025-8qcs7\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.448783 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4510910d-24a1-4530-b5a0-20d0a3be3570-config-volume\") pod 
\"collect-profiles-29414025-8qcs7\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.454143 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4510910d-24a1-4530-b5a0-20d0a3be3570-secret-volume\") pod \"collect-profiles-29414025-8qcs7\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.472447 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dmjm\" (UniqueName: \"kubernetes.io/projected/4510910d-24a1-4530-b5a0-20d0a3be3570-kube-api-access-7dmjm\") pod \"collect-profiles-29414025-8qcs7\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.485385 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.817265 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.817389 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:45:00 crc kubenswrapper[4707]: I1204 09:45:00.915106 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7"] Dec 04 09:45:00 crc kubenswrapper[4707]: W1204 09:45:00.918868 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4510910d_24a1_4530_b5a0_20d0a3be3570.slice/crio-d5a6c5284dbb2420cc93129c943ccfc912b5a3fba19a2ccb9ab5bf00c1681d61 WatchSource:0}: Error finding container d5a6c5284dbb2420cc93129c943ccfc912b5a3fba19a2ccb9ab5bf00c1681d61: Status 404 returned error can't find the container with id d5a6c5284dbb2420cc93129c943ccfc912b5a3fba19a2ccb9ab5bf00c1681d61 Dec 04 09:45:01 crc kubenswrapper[4707]: I1204 09:45:01.559235 4707 generic.go:334] "Generic (PLEG): container finished" podID="4510910d-24a1-4530-b5a0-20d0a3be3570" containerID="df1b64565467d1f0b15edd871e5cff4aae56ba1d010f370cf15710a6c0d4eef1" exitCode=0 Dec 04 09:45:01 crc kubenswrapper[4707]: I1204 09:45:01.559310 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" event={"ID":"4510910d-24a1-4530-b5a0-20d0a3be3570","Type":"ContainerDied","Data":"df1b64565467d1f0b15edd871e5cff4aae56ba1d010f370cf15710a6c0d4eef1"} Dec 04 09:45:01 crc kubenswrapper[4707]: I1204 09:45:01.559571 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" 
event={"ID":"4510910d-24a1-4530-b5a0-20d0a3be3570","Type":"ContainerStarted","Data":"d5a6c5284dbb2420cc93129c943ccfc912b5a3fba19a2ccb9ab5bf00c1681d61"} Dec 04 09:45:02 crc kubenswrapper[4707]: I1204 09:45:02.836684 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:02 crc kubenswrapper[4707]: I1204 09:45:02.988413 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dmjm\" (UniqueName: \"kubernetes.io/projected/4510910d-24a1-4530-b5a0-20d0a3be3570-kube-api-access-7dmjm\") pod \"4510910d-24a1-4530-b5a0-20d0a3be3570\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " Dec 04 09:45:02 crc kubenswrapper[4707]: I1204 09:45:02.988469 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4510910d-24a1-4530-b5a0-20d0a3be3570-secret-volume\") pod \"4510910d-24a1-4530-b5a0-20d0a3be3570\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " Dec 04 09:45:02 crc kubenswrapper[4707]: I1204 09:45:02.988547 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4510910d-24a1-4530-b5a0-20d0a3be3570-config-volume\") pod \"4510910d-24a1-4530-b5a0-20d0a3be3570\" (UID: \"4510910d-24a1-4530-b5a0-20d0a3be3570\") " Dec 04 09:45:02 crc kubenswrapper[4707]: I1204 09:45:02.989184 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4510910d-24a1-4530-b5a0-20d0a3be3570-config-volume" (OuterVolumeSpecName: "config-volume") pod "4510910d-24a1-4530-b5a0-20d0a3be3570" (UID: "4510910d-24a1-4530-b5a0-20d0a3be3570"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:45:02 crc kubenswrapper[4707]: I1204 09:45:02.994125 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4510910d-24a1-4530-b5a0-20d0a3be3570-kube-api-access-7dmjm" (OuterVolumeSpecName: "kube-api-access-7dmjm") pod "4510910d-24a1-4530-b5a0-20d0a3be3570" (UID: "4510910d-24a1-4530-b5a0-20d0a3be3570"). InnerVolumeSpecName "kube-api-access-7dmjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:45:02 crc kubenswrapper[4707]: I1204 09:45:02.994205 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4510910d-24a1-4530-b5a0-20d0a3be3570-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4510910d-24a1-4530-b5a0-20d0a3be3570" (UID: "4510910d-24a1-4530-b5a0-20d0a3be3570"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:45:03 crc kubenswrapper[4707]: I1204 09:45:03.090025 4707 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4510910d-24a1-4530-b5a0-20d0a3be3570-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 09:45:03 crc kubenswrapper[4707]: I1204 09:45:03.090071 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dmjm\" (UniqueName: \"kubernetes.io/projected/4510910d-24a1-4530-b5a0-20d0a3be3570-kube-api-access-7dmjm\") on node \"crc\" DevicePath \"\"" Dec 04 09:45:03 crc kubenswrapper[4707]: I1204 09:45:03.090088 4707 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4510910d-24a1-4530-b5a0-20d0a3be3570-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 09:45:03 crc kubenswrapper[4707]: I1204 09:45:03.574813 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" event={"ID":"4510910d-24a1-4530-b5a0-20d0a3be3570","Type":"ContainerDied","Data":"d5a6c5284dbb2420cc93129c943ccfc912b5a3fba19a2ccb9ab5bf00c1681d61"} Dec 04 09:45:03 crc kubenswrapper[4707]: I1204 09:45:03.574858 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5a6c5284dbb2420cc93129c943ccfc912b5a3fba19a2ccb9ab5bf00c1681d61" Dec 04 09:45:03 crc kubenswrapper[4707]: I1204 09:45:03.574916 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414025-8qcs7" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.338877 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" podUID="44b581fc-38bf-4c33-820c-f27a4a730932" containerName="registry" containerID="cri-o://7279ac8ca0db88564285e726136750e8fb46a021023072d398b9e25ac77c186e" gracePeriod=30 Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.585839 4707 generic.go:334] "Generic (PLEG): container finished" podID="44b581fc-38bf-4c33-820c-f27a4a730932" containerID="7279ac8ca0db88564285e726136750e8fb46a021023072d398b9e25ac77c186e" exitCode=0 Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.585904 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" event={"ID":"44b581fc-38bf-4c33-820c-f27a4a730932","Type":"ContainerDied","Data":"7279ac8ca0db88564285e726136750e8fb46a021023072d398b9e25ac77c186e"} Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.765515 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.826987 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-trusted-ca\") pod \"44b581fc-38bf-4c33-820c-f27a4a730932\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.827028 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/44b581fc-38bf-4c33-820c-f27a4a730932-installation-pull-secrets\") pod \"44b581fc-38bf-4c33-820c-f27a4a730932\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.827046 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-567qw\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-kube-api-access-567qw\") pod \"44b581fc-38bf-4c33-820c-f27a4a730932\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.827070 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/44b581fc-38bf-4c33-820c-f27a4a730932-ca-trust-extracted\") pod \"44b581fc-38bf-4c33-820c-f27a4a730932\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.827258 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"44b581fc-38bf-4c33-820c-f27a4a730932\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.827288 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-registry-certificates\") pod \"44b581fc-38bf-4c33-820c-f27a4a730932\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.827310 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-registry-tls\") pod \"44b581fc-38bf-4c33-820c-f27a4a730932\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.827345 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-bound-sa-token\") pod \"44b581fc-38bf-4c33-820c-f27a4a730932\" (UID: \"44b581fc-38bf-4c33-820c-f27a4a730932\") " Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.828148 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "44b581fc-38bf-4c33-820c-f27a4a730932" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.828709 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "44b581fc-38bf-4c33-820c-f27a4a730932" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.831864 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "44b581fc-38bf-4c33-820c-f27a4a730932" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.832368 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "44b581fc-38bf-4c33-820c-f27a4a730932" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.833881 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44b581fc-38bf-4c33-820c-f27a4a730932-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "44b581fc-38bf-4c33-820c-f27a4a730932" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.834097 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-kube-api-access-567qw" (OuterVolumeSpecName: "kube-api-access-567qw") pod "44b581fc-38bf-4c33-820c-f27a4a730932" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932"). InnerVolumeSpecName "kube-api-access-567qw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.839098 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "44b581fc-38bf-4c33-820c-f27a4a730932" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.843067 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44b581fc-38bf-4c33-820c-f27a4a730932-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "44b581fc-38bf-4c33-820c-f27a4a730932" (UID: "44b581fc-38bf-4c33-820c-f27a4a730932"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.928576 4707 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.928625 4707 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/44b581fc-38bf-4c33-820c-f27a4a730932-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.928640 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-567qw\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-kube-api-access-567qw\") on node \"crc\" DevicePath \"\"" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.928653 4707 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/44b581fc-38bf-4c33-820c-f27a4a730932-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.928665 4707 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/44b581fc-38bf-4c33-820c-f27a4a730932-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.928678 4707 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 04 09:45:05 crc kubenswrapper[4707]: I1204 09:45:05.928690 4707 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44b581fc-38bf-4c33-820c-f27a4a730932-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 04 09:45:06 crc kubenswrapper[4707]: I1204 09:45:06.592663 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" event={"ID":"44b581fc-38bf-4c33-820c-f27a4a730932","Type":"ContainerDied","Data":"6ead001147904c65853d22c7744a4e52fe2b4eee2476ac07e40076809d8fe86d"} Dec 04 09:45:06 crc kubenswrapper[4707]: I1204 09:45:06.592944 4707 scope.go:117] "RemoveContainer" containerID="7279ac8ca0db88564285e726136750e8fb46a021023072d398b9e25ac77c186e" Dec 04 09:45:06 crc kubenswrapper[4707]: I1204 09:45:06.592763 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n74dl" Dec 04 09:45:06 crc kubenswrapper[4707]: I1204 09:45:06.630853 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n74dl"] Dec 04 09:45:06 crc kubenswrapper[4707]: I1204 09:45:06.634199 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n74dl"] Dec 04 09:45:06 crc kubenswrapper[4707]: I1204 09:45:06.854241 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44b581fc-38bf-4c33-820c-f27a4a730932" path="/var/lib/kubelet/pods/44b581fc-38bf-4c33-820c-f27a4a730932/volumes" Dec 04 09:45:30 crc kubenswrapper[4707]: I1204 09:45:30.817184 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:45:30 crc kubenswrapper[4707]: I1204 09:45:30.818588 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:45:30 crc kubenswrapper[4707]: I1204 09:45:30.818677 4707 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:45:30 crc kubenswrapper[4707]: I1204 09:45:30.819388 4707 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f92df9cc0b8fd804a06257feebcda7fbf11147429c710e85020f10339b40deac"} pod="openshift-machine-config-operator/machine-config-daemon-c244z" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 09:45:30 crc kubenswrapper[4707]: I1204 09:45:30.819464 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" containerID="cri-o://f92df9cc0b8fd804a06257feebcda7fbf11147429c710e85020f10339b40deac" gracePeriod=600 Dec 04 09:45:31 crc kubenswrapper[4707]: I1204 09:45:31.733163 4707 generic.go:334] "Generic (PLEG): container finished" podID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerID="f92df9cc0b8fd804a06257feebcda7fbf11147429c710e85020f10339b40deac" exitCode=0 Dec 04 09:45:31 crc kubenswrapper[4707]: I1204 09:45:31.733212 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerDied","Data":"f92df9cc0b8fd804a06257feebcda7fbf11147429c710e85020f10339b40deac"} Dec 04 09:45:31 crc kubenswrapper[4707]: I1204 09:45:31.733634 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"59933ba8a620838bec3b3d9fcf426225c5ef7bc5c9d364798d360eff4e7c02b9"} Dec 04 09:45:31 crc kubenswrapper[4707]: I1204 09:45:31.733663 4707 scope.go:117] "RemoveContainer" 
containerID="d29778aa0c80e183d3ae046afff38b418defc2901adaaeca2213c4d59c1310bc" Dec 04 09:47:21 crc kubenswrapper[4707]: I1204 09:47:21.008059 4707 scope.go:117] "RemoveContainer" containerID="1f1c2a4ca770a1414b95e79c232af81f65a4e62af622a3945b3ebbafba969e63" Dec 04 09:48:00 crc kubenswrapper[4707]: I1204 09:48:00.817322 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:48:00 crc kubenswrapper[4707]: I1204 09:48:00.817976 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:48:21 crc kubenswrapper[4707]: I1204 09:48:21.036446 4707 scope.go:117] "RemoveContainer" containerID="be070ee7f9cb7d1d36021357c5e5d096dcb96d28f29636f7de6a42e44ed9bfaf" Dec 04 09:48:21 crc kubenswrapper[4707]: I1204 09:48:21.056478 4707 scope.go:117] "RemoveContainer" containerID="6277faef785e7ec6a2aa2e75ad9ba784613777409692d91f378bbbbb51e5c4f7" Dec 04 09:48:30 crc kubenswrapper[4707]: I1204 09:48:30.817742 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:48:30 crc kubenswrapper[4707]: I1204 09:48:30.818282 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:49:00 crc kubenswrapper[4707]: I1204 09:49:00.817440 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:49:00 crc kubenswrapper[4707]: I1204 09:49:00.818027 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:49:00 crc kubenswrapper[4707]: I1204 09:49:00.818122 4707 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:49:00 crc kubenswrapper[4707]: I1204 09:49:00.818905 4707 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"59933ba8a620838bec3b3d9fcf426225c5ef7bc5c9d364798d360eff4e7c02b9"} pod="openshift-machine-config-operator/machine-config-daemon-c244z" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 09:49:00 crc kubenswrapper[4707]: I1204 
09:49:00.818981 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" containerID="cri-o://59933ba8a620838bec3b3d9fcf426225c5ef7bc5c9d364798d360eff4e7c02b9" gracePeriod=600 Dec 04 09:49:01 crc kubenswrapper[4707]: I1204 09:49:01.410444 4707 generic.go:334] "Generic (PLEG): container finished" podID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerID="59933ba8a620838bec3b3d9fcf426225c5ef7bc5c9d364798d360eff4e7c02b9" exitCode=0 Dec 04 09:49:01 crc kubenswrapper[4707]: I1204 09:49:01.410521 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerDied","Data":"59933ba8a620838bec3b3d9fcf426225c5ef7bc5c9d364798d360eff4e7c02b9"} Dec 04 09:49:01 crc kubenswrapper[4707]: I1204 09:49:01.410780 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"38c08b074cc460fc3513402fc630433eabee9cc90aab57117db09d7ee10fc03a"} Dec 04 09:49:01 crc kubenswrapper[4707]: I1204 09:49:01.410799 4707 scope.go:117] "RemoveContainer" containerID="f92df9cc0b8fd804a06257feebcda7fbf11147429c710e85020f10339b40deac" Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.578742 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6nd57"] Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.580243 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovn-controller" containerID="cri-o://ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173" gracePeriod=30 Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.580311 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="nbdb" containerID="cri-o://49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97" gracePeriod=30 Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.580441 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="northd" containerID="cri-o://0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df" gracePeriod=30 Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.580516 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b" gracePeriod=30 Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.580540 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="sbdb" containerID="cri-o://ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f" gracePeriod=30 Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.580576 4707 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="kube-rbac-proxy-node" containerID="cri-o://f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472" gracePeriod=30 Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.580638 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovn-acl-logging" containerID="cri-o://474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75" gracePeriod=30 Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.621379 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" containerID="cri-o://62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31" gracePeriod=30 Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.954956 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/3.log" Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.957586 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovn-acl-logging/0.log" Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.958365 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75" exitCode=143 Dec 04 09:50:38 crc kubenswrapper[4707]: I1204 09:50:38.958411 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75"} Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.328587 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/3.log" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.330619 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovn-acl-logging/0.log" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.331091 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovn-controller/0.log" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.331808 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381067 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-cmghw"] Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381282 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381295 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381308 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovn-acl-logging" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381314 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovn-acl-logging" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381320 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="kube-rbac-proxy-node" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381326 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="kube-rbac-proxy-node" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381359 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="northd" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381366 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="northd" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381377 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="sbdb" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381384 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="sbdb" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381395 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4510910d-24a1-4530-b5a0-20d0a3be3570" containerName="collect-profiles" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381404 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4510910d-24a1-4530-b5a0-20d0a3be3570" containerName="collect-profiles" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381412 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381418 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381427 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381433 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381450 4707 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="kubecfg-setup" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381457 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="kubecfg-setup" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381463 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="kube-rbac-proxy-ovn-metrics" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381470 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="kube-rbac-proxy-ovn-metrics" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381479 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44b581fc-38bf-4c33-820c-f27a4a730932" containerName="registry" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381487 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="44b581fc-38bf-4c33-820c-f27a4a730932" containerName="registry" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381502 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="nbdb" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381509 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="nbdb" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381520 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovn-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381527 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovn-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381664 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381679 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="nbdb" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381690 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovn-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381699 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381706 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="kube-rbac-proxy-node" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381713 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="kube-rbac-proxy-ovn-metrics" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381720 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381728 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovn-acl-logging" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381739 4707 
memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="sbdb" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381746 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381755 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="northd" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381765 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="44b581fc-38bf-4c33-820c-f27a4a730932" containerName="registry" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381774 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="4510910d-24a1-4530-b5a0-20d0a3be3570" containerName="collect-profiles" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.381878 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.381888 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.382002 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: E1204 09:50:39.382139 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.382150 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerName="ovnkube-controller" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.383706 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.416909 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-netd\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.416950 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-systemd-units\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.416970 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.416993 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-config\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417008 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-systemd\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417027 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-log-socket\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417043 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-node-log\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417051 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417058 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-etc-openvswitch\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417083 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417116 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-log-socket" (OuterVolumeSpecName: "log-socket") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417122 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f472b\" (UniqueName: \"kubernetes.io/projected/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-kube-api-access-f472b\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417126 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417161 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-bin\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417178 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-node-log" (OuterVolumeSpecName: "node-log") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417185 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-ovn-kubernetes\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417199 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417207 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-var-lib-openvswitch\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417220 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417235 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-env-overrides\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417242 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417257 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-slash\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417286 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovn-node-metrics-cert\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417081 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417319 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-netns\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417348 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-slash" (OuterVolumeSpecName: "host-slash") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417367 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-kubelet\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417395 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-openvswitch\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417420 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-ovn\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417452 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-script-lib\") pod \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\" (UID: \"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb\") " Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417551 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-run-netns\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417581 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-etc-openvswitch\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417603 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-slash\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417624 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-ovnkube-script-lib\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417657 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-var-lib-openvswitch\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417684 
4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-cni-netd\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417692 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417704 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-env-overrides\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417747 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417777 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417778 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-ovn-node-metrics-cert\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417803 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417810 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4msh\" (UniqueName: \"kubernetes.io/projected/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-kube-api-access-x4msh\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417830 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417872 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-run-systemd\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417935 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-kubelet\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.417958 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-systemd-units\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418161 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-run-ovn\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418212 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418234 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418238 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-cni-bin\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418296 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418313 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-log-socket\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418369 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-node-log\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418392 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-run-ovn-kubernetes\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418467 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-ovnkube-config\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418516 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-run-openvswitch\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418620 4707 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418635 4707 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-slash\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418646 4707 reconciler_common.go:293] "Volume 
detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418659 4707 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418669 4707 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418680 4707 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418692 4707 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418702 4707 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418714 4707 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418725 4707 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418736 4707 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418749 4707 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-log-socket\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418759 4707 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418769 4707 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-node-log\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418779 4707 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418789 4707 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" 
(UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.418800 4707 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.422413 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.422798 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-kube-api-access-f472b" (OuterVolumeSpecName: "kube-api-access-f472b") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "kube-api-access-f472b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.430214 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" (UID: "5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519444 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-cni-netd\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519497 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-env-overrides\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519529 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-ovn-node-metrics-cert\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519532 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-cni-netd\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519551 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4msh\" (UniqueName: 
\"kubernetes.io/projected/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-kube-api-access-x4msh\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519602 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-run-systemd\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519641 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-run-ovn\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519661 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-kubelet\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519682 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-systemd-units\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519706 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519729 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-cni-bin\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519749 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-log-socket\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519766 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-node-log\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519786 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-run-ovn-kubernetes\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519813 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-ovnkube-config\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519842 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-run-openvswitch\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519876 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-etc-openvswitch\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519897 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-run-netns\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519931 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-slash\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519953 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-ovnkube-script-lib\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.519987 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-var-lib-openvswitch\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520039 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f472b\" (UniqueName: \"kubernetes.io/projected/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-kube-api-access-f472b\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520062 4707 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520077 4707 
reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520081 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-run-systemd\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520113 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-run-ovn-kubernetes\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520054 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-node-log\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520132 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-run-ovn\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520149 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-kubelet\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520183 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-systemd-units\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520206 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520231 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-cni-bin\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520250 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-log-socket\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520272 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-run-netns\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520293 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-run-openvswitch\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520313 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-etc-openvswitch\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520539 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-var-lib-openvswitch\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520598 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-host-slash\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520713 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-env-overrides\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520750 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-ovnkube-config\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.520989 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-ovnkube-script-lib\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.523544 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-ovn-node-metrics-cert\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.535348 4707 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x4msh\" (UniqueName: \"kubernetes.io/projected/8a4bf5df-2995-4d05-bc37-37d3b8e8706e-kube-api-access-x4msh\") pod \"ovnkube-node-cmghw\" (UID: \"8a4bf5df-2995-4d05-bc37-37d3b8e8706e\") " pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.697028 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.976762 4707 generic.go:334] "Generic (PLEG): container finished" podID="8a4bf5df-2995-4d05-bc37-37d3b8e8706e" containerID="7449451152acf2437db07a2b9798edbe67a3d1cce930d03787870b5dece79fc6" exitCode=0 Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.976825 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" event={"ID":"8a4bf5df-2995-4d05-bc37-37d3b8e8706e","Type":"ContainerDied","Data":"7449451152acf2437db07a2b9798edbe67a3d1cce930d03787870b5dece79fc6"} Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.977205 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" event={"ID":"8a4bf5df-2995-4d05-bc37-37d3b8e8706e","Type":"ContainerStarted","Data":"6b558cf497c4a93550ff082c2d71cda0a8aba952974e0ac13eb2d51427698dd1"} Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.979961 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-npc85_e9d3467a-1f4a-4d54-97b3-c7fd062eff13/kube-multus/2.log" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.980696 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-npc85_e9d3467a-1f4a-4d54-97b3-c7fd062eff13/kube-multus/1.log" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.980743 4707 generic.go:334] "Generic (PLEG): container finished" podID="e9d3467a-1f4a-4d54-97b3-c7fd062eff13" containerID="bb57638a447d7d048f32e79b4b13aa36ab3639fa6197fbb6f5bdb6ec80fdcb24" exitCode=2 Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.980812 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-npc85" event={"ID":"e9d3467a-1f4a-4d54-97b3-c7fd062eff13","Type":"ContainerDied","Data":"bb57638a447d7d048f32e79b4b13aa36ab3639fa6197fbb6f5bdb6ec80fdcb24"} Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.980866 4707 scope.go:117] "RemoveContainer" containerID="861f35b22449ce757109761274b8dca3126dee803aed610da0d726e6d510de95" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.981296 4707 scope.go:117] "RemoveContainer" containerID="bb57638a447d7d048f32e79b4b13aa36ab3639fa6197fbb6f5bdb6ec80fdcb24" Dec 04 09:50:39 crc kubenswrapper[4707]: I1204 09:50:39.985035 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovnkube-controller/3.log" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.008101 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovn-acl-logging/0.log" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.008679 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6nd57_5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/ovn-controller/0.log" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009240 4707 generic.go:334] "Generic (PLEG): container finished" 
podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31" exitCode=0 Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009274 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f" exitCode=0 Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009281 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97" exitCode=0 Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009291 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df" exitCode=0 Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009298 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b" exitCode=0 Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009304 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472" exitCode=0 Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009310 4707 generic.go:334] "Generic (PLEG): container finished" podID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" containerID="ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173" exitCode=143 Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009322 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009346 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009722 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009795 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009896 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.009965 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010023 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010084 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010140 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010199 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010252 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010326 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010396 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010446 4707 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010494 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010574 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010627 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010681 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010736 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010786 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010834 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010884 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.010939 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011005 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011062 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011108 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011151 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011203 4707 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011257 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6nd57" event={"ID":"5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb","Type":"ContainerDied","Data":"b4924668838c4191ef62e7e1dc1e72fcb7e8f31b607e0fa28ae52470fe6c3cc1"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011310 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011380 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011430 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011474 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011521 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011568 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011614 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011663 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011708 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.011753 4707 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c"} Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.079907 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6nd57"] Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.080192 4707 scope.go:117] "RemoveContainer" containerID="62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.087983 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6nd57"] Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 
09:50:40.115270 4707 scope.go:117] "RemoveContainer" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.129363 4707 scope.go:117] "RemoveContainer" containerID="ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.142852 4707 scope.go:117] "RemoveContainer" containerID="49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.156634 4707 scope.go:117] "RemoveContainer" containerID="0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.171222 4707 scope.go:117] "RemoveContainer" containerID="ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.186008 4707 scope.go:117] "RemoveContainer" containerID="f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.204427 4707 scope.go:117] "RemoveContainer" containerID="474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.224174 4707 scope.go:117] "RemoveContainer" containerID="ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.246088 4707 scope.go:117] "RemoveContainer" containerID="d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.269806 4707 scope.go:117] "RemoveContainer" containerID="62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31" Dec 04 09:50:40 crc kubenswrapper[4707]: E1204 09:50:40.270270 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31\": container with ID starting with 62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31 not found: ID does not exist" containerID="62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.270324 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31"} err="failed to get container status \"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31\": rpc error: code = NotFound desc = could not find container \"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31\": container with ID starting with 62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.270375 4707 scope.go:117] "RemoveContainer" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" Dec 04 09:50:40 crc kubenswrapper[4707]: E1204 09:50:40.270737 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\": container with ID starting with 1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025 not found: ID does not exist" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.270769 4707 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025"} err="failed to get container status \"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\": rpc error: code = NotFound desc = could not find container \"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\": container with ID starting with 1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.270800 4707 scope.go:117] "RemoveContainer" containerID="ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f" Dec 04 09:50:40 crc kubenswrapper[4707]: E1204 09:50:40.271007 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\": container with ID starting with ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f not found: ID does not exist" containerID="ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.271033 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f"} err="failed to get container status \"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\": rpc error: code = NotFound desc = could not find container \"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\": container with ID starting with ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.271050 4707 scope.go:117] "RemoveContainer" containerID="49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97" Dec 04 09:50:40 crc kubenswrapper[4707]: E1204 09:50:40.271281 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\": container with ID starting with 49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97 not found: ID does not exist" containerID="49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.271312 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97"} err="failed to get container status \"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\": rpc error: code = NotFound desc = could not find container \"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\": container with ID starting with 49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.271330 4707 scope.go:117] "RemoveContainer" containerID="0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df" Dec 04 09:50:40 crc kubenswrapper[4707]: E1204 09:50:40.271597 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\": container with ID starting with 0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df not found: ID does not exist" 
containerID="0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.271627 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df"} err="failed to get container status \"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\": rpc error: code = NotFound desc = could not find container \"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\": container with ID starting with 0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.271647 4707 scope.go:117] "RemoveContainer" containerID="ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b" Dec 04 09:50:40 crc kubenswrapper[4707]: E1204 09:50:40.271867 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\": container with ID starting with ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b not found: ID does not exist" containerID="ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.271894 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b"} err="failed to get container status \"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\": rpc error: code = NotFound desc = could not find container \"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\": container with ID starting with ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.271909 4707 scope.go:117] "RemoveContainer" containerID="f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472" Dec 04 09:50:40 crc kubenswrapper[4707]: E1204 09:50:40.272109 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\": container with ID starting with f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472 not found: ID does not exist" containerID="f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.272132 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472"} err="failed to get container status \"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\": rpc error: code = NotFound desc = could not find container \"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\": container with ID starting with f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.272145 4707 scope.go:117] "RemoveContainer" containerID="474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75" Dec 04 09:50:40 crc kubenswrapper[4707]: E1204 09:50:40.272357 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\": container with ID starting with 474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75 not found: ID does not exist" containerID="474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.272383 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75"} err="failed to get container status \"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\": rpc error: code = NotFound desc = could not find container \"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\": container with ID starting with 474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.272402 4707 scope.go:117] "RemoveContainer" containerID="ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173" Dec 04 09:50:40 crc kubenswrapper[4707]: E1204 09:50:40.272617 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\": container with ID starting with ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173 not found: ID does not exist" containerID="ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.272638 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173"} err="failed to get container status \"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\": rpc error: code = NotFound desc = could not find container \"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\": container with ID starting with ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.272652 4707 scope.go:117] "RemoveContainer" containerID="d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c" Dec 04 09:50:40 crc kubenswrapper[4707]: E1204 09:50:40.272870 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\": container with ID starting with d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c not found: ID does not exist" containerID="d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.272886 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c"} err="failed to get container status \"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\": rpc error: code = NotFound desc = could not find container \"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\": container with ID starting with d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.272898 4707 scope.go:117] "RemoveContainer" containerID="62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31" Dec 04 09:50:40 crc 
kubenswrapper[4707]: I1204 09:50:40.273092 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31"} err="failed to get container status \"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31\": rpc error: code = NotFound desc = could not find container \"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31\": container with ID starting with 62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.273115 4707 scope.go:117] "RemoveContainer" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.273311 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025"} err="failed to get container status \"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\": rpc error: code = NotFound desc = could not find container \"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\": container with ID starting with 1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.273351 4707 scope.go:117] "RemoveContainer" containerID="ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.273583 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f"} err="failed to get container status \"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\": rpc error: code = NotFound desc = could not find container \"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\": container with ID starting with ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.273608 4707 scope.go:117] "RemoveContainer" containerID="49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.273814 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97"} err="failed to get container status \"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\": rpc error: code = NotFound desc = could not find container \"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\": container with ID starting with 49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.273838 4707 scope.go:117] "RemoveContainer" containerID="0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.274055 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df"} err="failed to get container status \"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\": rpc error: code = NotFound desc = could not find container \"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\": container with ID 
starting with 0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.274075 4707 scope.go:117] "RemoveContainer" containerID="ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.274310 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b"} err="failed to get container status \"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\": rpc error: code = NotFound desc = could not find container \"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\": container with ID starting with ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.274348 4707 scope.go:117] "RemoveContainer" containerID="f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.274600 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472"} err="failed to get container status \"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\": rpc error: code = NotFound desc = could not find container \"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\": container with ID starting with f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.274623 4707 scope.go:117] "RemoveContainer" containerID="474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.274800 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75"} err="failed to get container status \"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\": rpc error: code = NotFound desc = could not find container \"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\": container with ID starting with 474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.274820 4707 scope.go:117] "RemoveContainer" containerID="ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.275015 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173"} err="failed to get container status \"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\": rpc error: code = NotFound desc = could not find container \"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\": container with ID starting with ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.275040 4707 scope.go:117] "RemoveContainer" containerID="d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.275242 4707 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c"} err="failed to get container status \"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\": rpc error: code = NotFound desc = could not find container \"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\": container with ID starting with d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.275265 4707 scope.go:117] "RemoveContainer" containerID="62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.275474 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31"} err="failed to get container status \"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31\": rpc error: code = NotFound desc = could not find container \"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31\": container with ID starting with 62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.275496 4707 scope.go:117] "RemoveContainer" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.275709 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025"} err="failed to get container status \"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\": rpc error: code = NotFound desc = could not find container \"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\": container with ID starting with 1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.275734 4707 scope.go:117] "RemoveContainer" containerID="ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.275939 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f"} err="failed to get container status \"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\": rpc error: code = NotFound desc = could not find container \"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\": container with ID starting with ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.275958 4707 scope.go:117] "RemoveContainer" containerID="49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.276153 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97"} err="failed to get container status \"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\": rpc error: code = NotFound desc = could not find container \"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\": container with ID starting with 49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97 not found: ID does not exist" Dec 
04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.276177 4707 scope.go:117] "RemoveContainer" containerID="0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.276384 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df"} err="failed to get container status \"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\": rpc error: code = NotFound desc = could not find container \"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\": container with ID starting with 0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.276405 4707 scope.go:117] "RemoveContainer" containerID="ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.276610 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b"} err="failed to get container status \"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\": rpc error: code = NotFound desc = could not find container \"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\": container with ID starting with ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.276634 4707 scope.go:117] "RemoveContainer" containerID="f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.276851 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472"} err="failed to get container status \"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\": rpc error: code = NotFound desc = could not find container \"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\": container with ID starting with f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.276871 4707 scope.go:117] "RemoveContainer" containerID="474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.277071 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75"} err="failed to get container status \"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\": rpc error: code = NotFound desc = could not find container \"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\": container with ID starting with 474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.277094 4707 scope.go:117] "RemoveContainer" containerID="ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.277293 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173"} err="failed to get container status 
\"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\": rpc error: code = NotFound desc = could not find container \"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\": container with ID starting with ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.277312 4707 scope.go:117] "RemoveContainer" containerID="d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.277671 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c"} err="failed to get container status \"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\": rpc error: code = NotFound desc = could not find container \"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\": container with ID starting with d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.277695 4707 scope.go:117] "RemoveContainer" containerID="62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.277937 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31"} err="failed to get container status \"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31\": rpc error: code = NotFound desc = could not find container \"62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31\": container with ID starting with 62acdc60d80bf1e211f214aecf450f03d5db7be7fc6d33a879a13d1f252c9d31 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.277956 4707 scope.go:117] "RemoveContainer" containerID="1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.278169 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025"} err="failed to get container status \"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\": rpc error: code = NotFound desc = could not find container \"1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025\": container with ID starting with 1504ef648166c0519bc6355146c84b296aea9182a894802aa5c05e9bcfd4a025 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.278191 4707 scope.go:117] "RemoveContainer" containerID="ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.278388 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f"} err="failed to get container status \"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\": rpc error: code = NotFound desc = could not find container \"ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f\": container with ID starting with ba90982398c33942f156098cf1b08b98f976409885df90d9dab4654e3812ce7f not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.278435 4707 scope.go:117] "RemoveContainer" 
containerID="49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.278672 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97"} err="failed to get container status \"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\": rpc error: code = NotFound desc = could not find container \"49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97\": container with ID starting with 49f3c654391862d995052e2189f956ce9dee1884d1191c21f3974d75b44e3e97 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.278695 4707 scope.go:117] "RemoveContainer" containerID="0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.278915 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df"} err="failed to get container status \"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\": rpc error: code = NotFound desc = could not find container \"0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df\": container with ID starting with 0ff1a212aa9f2e42d86782e7da2d522ce7ce2663ca12e667fa93d1cc7f23f3df not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.278931 4707 scope.go:117] "RemoveContainer" containerID="ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.279177 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b"} err="failed to get container status \"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\": rpc error: code = NotFound desc = could not find container \"ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b\": container with ID starting with ad372d35b521bc65174169642bc5b107f04cedb5774e72d1623c58f11cd8bb3b not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.279200 4707 scope.go:117] "RemoveContainer" containerID="f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.279508 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472"} err="failed to get container status \"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\": rpc error: code = NotFound desc = could not find container \"f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472\": container with ID starting with f3a6c8315c8eeeeadcc5292800a3c2033f5b22d88beb08bfc133eb57e85cc472 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.279533 4707 scope.go:117] "RemoveContainer" containerID="474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.279729 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75"} err="failed to get container status \"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\": rpc error: code = NotFound desc = could not find 
container \"474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75\": container with ID starting with 474615ea1efd639222a730af850f0738cbf750eb43b76bc557fce154a9659f75 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.279750 4707 scope.go:117] "RemoveContainer" containerID="ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.279954 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173"} err="failed to get container status \"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\": rpc error: code = NotFound desc = could not find container \"ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173\": container with ID starting with ff66b692ba5c346e31f9abceab8a1b238144a8f35b9adf377e94f05b874e2173 not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.279978 4707 scope.go:117] "RemoveContainer" containerID="d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.280164 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c"} err="failed to get container status \"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\": rpc error: code = NotFound desc = could not find container \"d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c\": container with ID starting with d1607d2c8024d14f61d30d0390a4cd5d40ff33d3365bbf0b7d7b0b428c07176c not found: ID does not exist" Dec 04 09:50:40 crc kubenswrapper[4707]: I1204 09:50:40.854144 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb" path="/var/lib/kubelet/pods/5eddef4f-ede7-4ed6-8b03-acd65c4b9bdb/volumes" Dec 04 09:50:41 crc kubenswrapper[4707]: I1204 09:50:41.029902 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" event={"ID":"8a4bf5df-2995-4d05-bc37-37d3b8e8706e","Type":"ContainerStarted","Data":"a1a2ee82491f697fc3aacd7812a9ef6fb32a0a3b50e9b06978cbd17bac439c00"} Dec 04 09:50:41 crc kubenswrapper[4707]: I1204 09:50:41.029953 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" event={"ID":"8a4bf5df-2995-4d05-bc37-37d3b8e8706e","Type":"ContainerStarted","Data":"a13f59c8c5b82ce912ab676ad05462b0eb26bcf84dca2cb7b9eabb807749319a"} Dec 04 09:50:41 crc kubenswrapper[4707]: I1204 09:50:41.029968 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" event={"ID":"8a4bf5df-2995-4d05-bc37-37d3b8e8706e","Type":"ContainerStarted","Data":"30661afa7897f089dc312830e7d81de5ff977c6866f52a19ededcd00f9c616d2"} Dec 04 09:50:41 crc kubenswrapper[4707]: I1204 09:50:41.029979 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" event={"ID":"8a4bf5df-2995-4d05-bc37-37d3b8e8706e","Type":"ContainerStarted","Data":"6b0aa52cc9530d83f5dc89732560cb5d54a9c93749f3c0cff963f229c371698c"} Dec 04 09:50:41 crc kubenswrapper[4707]: I1204 09:50:41.029990 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" 
event={"ID":"8a4bf5df-2995-4d05-bc37-37d3b8e8706e","Type":"ContainerStarted","Data":"594755cc51c18d958beaab955b680a7d5ed972cf7d2a26b2ff265de6a17663a7"} Dec 04 09:50:41 crc kubenswrapper[4707]: I1204 09:50:41.030001 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" event={"ID":"8a4bf5df-2995-4d05-bc37-37d3b8e8706e","Type":"ContainerStarted","Data":"eb4ca399c7d9df913b59154863c7696c9f0405f3df85eb20346d9e40ae05519a"} Dec 04 09:50:41 crc kubenswrapper[4707]: I1204 09:50:41.031846 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-npc85_e9d3467a-1f4a-4d54-97b3-c7fd062eff13/kube-multus/2.log" Dec 04 09:50:41 crc kubenswrapper[4707]: I1204 09:50:41.031899 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-npc85" event={"ID":"e9d3467a-1f4a-4d54-97b3-c7fd062eff13","Type":"ContainerStarted","Data":"6948d812d408a5b1868bbe62a2f51088201c95c69b82fdad5cc499a58006a6fe"} Dec 04 09:50:43 crc kubenswrapper[4707]: I1204 09:50:43.045726 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" event={"ID":"8a4bf5df-2995-4d05-bc37-37d3b8e8706e","Type":"ContainerStarted","Data":"8bb9d2b444c59dcdc43652abca7677cd1fad93705384268c5237929d13d1c1ec"} Dec 04 09:50:46 crc kubenswrapper[4707]: I1204 09:50:46.064316 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" event={"ID":"8a4bf5df-2995-4d05-bc37-37d3b8e8706e","Type":"ContainerStarted","Data":"61ccf9385ae6c3fe02f82ca6f12e9fb5cce1d8787385e06c66df1477f470df96"} Dec 04 09:50:46 crc kubenswrapper[4707]: I1204 09:50:46.065038 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:46 crc kubenswrapper[4707]: I1204 09:50:46.065067 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:46 crc kubenswrapper[4707]: I1204 09:50:46.065083 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:46 crc kubenswrapper[4707]: I1204 09:50:46.094639 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:46 crc kubenswrapper[4707]: I1204 09:50:46.098588 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:50:46 crc kubenswrapper[4707]: I1204 09:50:46.107075 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" podStartSLOduration=7.107057109 podStartE2EDuration="7.107057109s" podCreationTimestamp="2025-12-04 09:50:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:50:46.104522107 +0000 UTC m=+745.540344624" watchObservedRunningTime="2025-12-04 09:50:46.107057109 +0000 UTC m=+745.542879626" Dec 04 09:50:52 crc kubenswrapper[4707]: I1204 09:50:52.556373 4707 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.035017 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz"] Dec 04 09:51:04 crc 
kubenswrapper[4707]: I1204 09:51:04.036429 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.038828 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.045714 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz"] Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.173534 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwkkt\" (UniqueName: \"kubernetes.io/projected/1b113f24-11b8-4720-87a6-ccae8b3f888e-kube-api-access-vwkkt\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.173612 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.173695 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.275392 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.275528 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.275586 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwkkt\" (UniqueName: \"kubernetes.io/projected/1b113f24-11b8-4720-87a6-ccae8b3f888e-kube-api-access-vwkkt\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.275861 4707 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.276092 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.299101 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwkkt\" (UniqueName: \"kubernetes.io/projected/1b113f24-11b8-4720-87a6-ccae8b3f888e-kube-api-access-vwkkt\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.357081 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:04 crc kubenswrapper[4707]: I1204 09:51:04.764118 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz"] Dec 04 09:51:05 crc kubenswrapper[4707]: I1204 09:51:05.160003 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" event={"ID":"1b113f24-11b8-4720-87a6-ccae8b3f888e","Type":"ContainerStarted","Data":"87ab69ad49214643259db27eda7a6844ca52cb32db5db36cd253e0d12ca146e3"} Dec 04 09:51:05 crc kubenswrapper[4707]: I1204 09:51:05.160107 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" event={"ID":"1b113f24-11b8-4720-87a6-ccae8b3f888e","Type":"ContainerStarted","Data":"cc4035dfb248d25e50cbb2de7c0a5be2a1f55f2dd3108b6822c751ba36d0463b"} Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.167576 4707 generic.go:334] "Generic (PLEG): container finished" podID="1b113f24-11b8-4720-87a6-ccae8b3f888e" containerID="87ab69ad49214643259db27eda7a6844ca52cb32db5db36cd253e0d12ca146e3" exitCode=0 Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.167704 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" event={"ID":"1b113f24-11b8-4720-87a6-ccae8b3f888e","Type":"ContainerDied","Data":"87ab69ad49214643259db27eda7a6844ca52cb32db5db36cd253e0d12ca146e3"} Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.171190 4707 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.401831 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b6v7l"] Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.404577 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.424614 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b6v7l"] Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.505489 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-catalog-content\") pod \"redhat-operators-b6v7l\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.505560 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcvks\" (UniqueName: \"kubernetes.io/projected/afc98ce4-a504-4788-8060-c74df9c37a76-kube-api-access-qcvks\") pod \"redhat-operators-b6v7l\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.505659 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-utilities\") pod \"redhat-operators-b6v7l\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.606818 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-utilities\") pod \"redhat-operators-b6v7l\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.606885 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-catalog-content\") pod \"redhat-operators-b6v7l\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.606922 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcvks\" (UniqueName: \"kubernetes.io/projected/afc98ce4-a504-4788-8060-c74df9c37a76-kube-api-access-qcvks\") pod \"redhat-operators-b6v7l\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.607384 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-catalog-content\") pod \"redhat-operators-b6v7l\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.607518 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-utilities\") pod \"redhat-operators-b6v7l\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.629500 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-qcvks\" (UniqueName: \"kubernetes.io/projected/afc98ce4-a504-4788-8060-c74df9c37a76-kube-api-access-qcvks\") pod \"redhat-operators-b6v7l\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.722463 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:06 crc kubenswrapper[4707]: I1204 09:51:06.909740 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b6v7l"] Dec 04 09:51:06 crc kubenswrapper[4707]: W1204 09:51:06.922286 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafc98ce4_a504_4788_8060_c74df9c37a76.slice/crio-bf3592a66358b13e7418b7f52b85f6430617f9cae662a8f836770b42aabcf20b WatchSource:0}: Error finding container bf3592a66358b13e7418b7f52b85f6430617f9cae662a8f836770b42aabcf20b: Status 404 returned error can't find the container with id bf3592a66358b13e7418b7f52b85f6430617f9cae662a8f836770b42aabcf20b Dec 04 09:51:07 crc kubenswrapper[4707]: I1204 09:51:07.175108 4707 generic.go:334] "Generic (PLEG): container finished" podID="afc98ce4-a504-4788-8060-c74df9c37a76" containerID="af0ea570a60928af92d85c1572865f1243e60415865fad4d23029a74ea5e8406" exitCode=0 Dec 04 09:51:07 crc kubenswrapper[4707]: I1204 09:51:07.175469 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6v7l" event={"ID":"afc98ce4-a504-4788-8060-c74df9c37a76","Type":"ContainerDied","Data":"af0ea570a60928af92d85c1572865f1243e60415865fad4d23029a74ea5e8406"} Dec 04 09:51:07 crc kubenswrapper[4707]: I1204 09:51:07.175503 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6v7l" event={"ID":"afc98ce4-a504-4788-8060-c74df9c37a76","Type":"ContainerStarted","Data":"bf3592a66358b13e7418b7f52b85f6430617f9cae662a8f836770b42aabcf20b"} Dec 04 09:51:08 crc kubenswrapper[4707]: I1204 09:51:08.180381 4707 generic.go:334] "Generic (PLEG): container finished" podID="1b113f24-11b8-4720-87a6-ccae8b3f888e" containerID="d89e3304b6b0ee41287c24099237a37f3ab0e227356d6768e44bb3a2a985f9f9" exitCode=0 Dec 04 09:51:08 crc kubenswrapper[4707]: I1204 09:51:08.180436 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" event={"ID":"1b113f24-11b8-4720-87a6-ccae8b3f888e","Type":"ContainerDied","Data":"d89e3304b6b0ee41287c24099237a37f3ab0e227356d6768e44bb3a2a985f9f9"} Dec 04 09:51:08 crc kubenswrapper[4707]: I1204 09:51:08.182198 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6v7l" event={"ID":"afc98ce4-a504-4788-8060-c74df9c37a76","Type":"ContainerStarted","Data":"d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49"} Dec 04 09:51:09 crc kubenswrapper[4707]: I1204 09:51:09.188500 4707 generic.go:334] "Generic (PLEG): container finished" podID="1b113f24-11b8-4720-87a6-ccae8b3f888e" containerID="d1b9ce810d7efaa3be234c8d7975034d5a808a23ab15de89ae6a188028023a59" exitCode=0 Dec 04 09:51:09 crc kubenswrapper[4707]: I1204 09:51:09.188587 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" 
event={"ID":"1b113f24-11b8-4720-87a6-ccae8b3f888e","Type":"ContainerDied","Data":"d1b9ce810d7efaa3be234c8d7975034d5a808a23ab15de89ae6a188028023a59"} Dec 04 09:51:09 crc kubenswrapper[4707]: I1204 09:51:09.717046 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cmghw" Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.197743 4707 generic.go:334] "Generic (PLEG): container finished" podID="afc98ce4-a504-4788-8060-c74df9c37a76" containerID="d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49" exitCode=0 Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.197860 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6v7l" event={"ID":"afc98ce4-a504-4788-8060-c74df9c37a76","Type":"ContainerDied","Data":"d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49"} Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.457497 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.558221 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-bundle\") pod \"1b113f24-11b8-4720-87a6-ccae8b3f888e\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.558415 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwkkt\" (UniqueName: \"kubernetes.io/projected/1b113f24-11b8-4720-87a6-ccae8b3f888e-kube-api-access-vwkkt\") pod \"1b113f24-11b8-4720-87a6-ccae8b3f888e\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.558436 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-util\") pod \"1b113f24-11b8-4720-87a6-ccae8b3f888e\" (UID: \"1b113f24-11b8-4720-87a6-ccae8b3f888e\") " Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.559217 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-bundle" (OuterVolumeSpecName: "bundle") pod "1b113f24-11b8-4720-87a6-ccae8b3f888e" (UID: "1b113f24-11b8-4720-87a6-ccae8b3f888e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.563961 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b113f24-11b8-4720-87a6-ccae8b3f888e-kube-api-access-vwkkt" (OuterVolumeSpecName: "kube-api-access-vwkkt") pod "1b113f24-11b8-4720-87a6-ccae8b3f888e" (UID: "1b113f24-11b8-4720-87a6-ccae8b3f888e"). InnerVolumeSpecName "kube-api-access-vwkkt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.570791 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-util" (OuterVolumeSpecName: "util") pod "1b113f24-11b8-4720-87a6-ccae8b3f888e" (UID: "1b113f24-11b8-4720-87a6-ccae8b3f888e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.659541 4707 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.659587 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwkkt\" (UniqueName: \"kubernetes.io/projected/1b113f24-11b8-4720-87a6-ccae8b3f888e-kube-api-access-vwkkt\") on node \"crc\" DevicePath \"\"" Dec 04 09:51:10 crc kubenswrapper[4707]: I1204 09:51:10.659602 4707 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1b113f24-11b8-4720-87a6-ccae8b3f888e-util\") on node \"crc\" DevicePath \"\"" Dec 04 09:51:11 crc kubenswrapper[4707]: I1204 09:51:11.204954 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" event={"ID":"1b113f24-11b8-4720-87a6-ccae8b3f888e","Type":"ContainerDied","Data":"cc4035dfb248d25e50cbb2de7c0a5be2a1f55f2dd3108b6822c751ba36d0463b"} Dec 04 09:51:11 crc kubenswrapper[4707]: I1204 09:51:11.205241 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc4035dfb248d25e50cbb2de7c0a5be2a1f55f2dd3108b6822c751ba36d0463b" Dec 04 09:51:11 crc kubenswrapper[4707]: I1204 09:51:11.204987 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz" Dec 04 09:51:11 crc kubenswrapper[4707]: I1204 09:51:11.207946 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6v7l" event={"ID":"afc98ce4-a504-4788-8060-c74df9c37a76","Type":"ContainerStarted","Data":"e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788"} Dec 04 09:51:11 crc kubenswrapper[4707]: I1204 09:51:11.226559 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b6v7l" podStartSLOduration=1.646633301 podStartE2EDuration="5.226538912s" podCreationTimestamp="2025-12-04 09:51:06 +0000 UTC" firstStartedPulling="2025-12-04 09:51:07.176986837 +0000 UTC m=+766.612809344" lastFinishedPulling="2025-12-04 09:51:10.756892448 +0000 UTC m=+770.192714955" observedRunningTime="2025-12-04 09:51:11.224473386 +0000 UTC m=+770.660295933" watchObservedRunningTime="2025-12-04 09:51:11.226538912 +0000 UTC m=+770.662361429" Dec 04 09:51:16 crc kubenswrapper[4707]: I1204 09:51:16.723756 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:16 crc kubenswrapper[4707]: I1204 09:51:16.724056 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:17 crc kubenswrapper[4707]: I1204 09:51:17.772862 4707 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-b6v7l" podUID="afc98ce4-a504-4788-8060-c74df9c37a76" containerName="registry-server" probeResult="failure" output=< Dec 04 09:51:17 crc kubenswrapper[4707]: timeout: failed to connect service ":50051" within 1s Dec 04 09:51:17 crc kubenswrapper[4707]: > Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.407679 4707 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls"] Dec 04 09:51:21 crc kubenswrapper[4707]: E1204 09:51:21.408384 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b113f24-11b8-4720-87a6-ccae8b3f888e" containerName="util" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.408398 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b113f24-11b8-4720-87a6-ccae8b3f888e" containerName="util" Dec 04 09:51:21 crc kubenswrapper[4707]: E1204 09:51:21.408409 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b113f24-11b8-4720-87a6-ccae8b3f888e" containerName="pull" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.408414 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b113f24-11b8-4720-87a6-ccae8b3f888e" containerName="pull" Dec 04 09:51:21 crc kubenswrapper[4707]: E1204 09:51:21.408426 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b113f24-11b8-4720-87a6-ccae8b3f888e" containerName="extract" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.408431 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b113f24-11b8-4720-87a6-ccae8b3f888e" containerName="extract" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.408539 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b113f24-11b8-4720-87a6-ccae8b3f888e" containerName="extract" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.408907 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.411264 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-5q5b4" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.411875 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.412716 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.412743 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.413308 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.460470 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls"] Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.531956 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/41bddb2a-8c9f-42a0-a450-06a9e755c211-webhook-cert\") pod \"metallb-operator-controller-manager-5657775d6b-zhwls\" (UID: \"41bddb2a-8c9f-42a0-a450-06a9e755c211\") " pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.532029 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xwj7\" (UniqueName: \"kubernetes.io/projected/41bddb2a-8c9f-42a0-a450-06a9e755c211-kube-api-access-5xwj7\") pod \"metallb-operator-controller-manager-5657775d6b-zhwls\" (UID: 
\"41bddb2a-8c9f-42a0-a450-06a9e755c211\") " pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.532195 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/41bddb2a-8c9f-42a0-a450-06a9e755c211-apiservice-cert\") pod \"metallb-operator-controller-manager-5657775d6b-zhwls\" (UID: \"41bddb2a-8c9f-42a0-a450-06a9e755c211\") " pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.633458 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/41bddb2a-8c9f-42a0-a450-06a9e755c211-webhook-cert\") pod \"metallb-operator-controller-manager-5657775d6b-zhwls\" (UID: \"41bddb2a-8c9f-42a0-a450-06a9e755c211\") " pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.633523 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xwj7\" (UniqueName: \"kubernetes.io/projected/41bddb2a-8c9f-42a0-a450-06a9e755c211-kube-api-access-5xwj7\") pod \"metallb-operator-controller-manager-5657775d6b-zhwls\" (UID: \"41bddb2a-8c9f-42a0-a450-06a9e755c211\") " pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.633566 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/41bddb2a-8c9f-42a0-a450-06a9e755c211-apiservice-cert\") pod \"metallb-operator-controller-manager-5657775d6b-zhwls\" (UID: \"41bddb2a-8c9f-42a0-a450-06a9e755c211\") " pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.639174 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/41bddb2a-8c9f-42a0-a450-06a9e755c211-apiservice-cert\") pod \"metallb-operator-controller-manager-5657775d6b-zhwls\" (UID: \"41bddb2a-8c9f-42a0-a450-06a9e755c211\") " pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.643391 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/41bddb2a-8c9f-42a0-a450-06a9e755c211-webhook-cert\") pod \"metallb-operator-controller-manager-5657775d6b-zhwls\" (UID: \"41bddb2a-8c9f-42a0-a450-06a9e755c211\") " pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.682173 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xwj7\" (UniqueName: \"kubernetes.io/projected/41bddb2a-8c9f-42a0-a450-06a9e755c211-kube-api-access-5xwj7\") pod \"metallb-operator-controller-manager-5657775d6b-zhwls\" (UID: \"41bddb2a-8c9f-42a0-a450-06a9e755c211\") " pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.724011 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.796445 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx"] Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.797203 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.799916 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.800214 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.804297 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-qfdtm" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.814564 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx"] Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.835405 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5d4cf\" (UniqueName: \"kubernetes.io/projected/6a9e4f53-c751-4994-8e44-6bcc07b40dc8-kube-api-access-5d4cf\") pod \"metallb-operator-webhook-server-fcf969487-8d9dx\" (UID: \"6a9e4f53-c751-4994-8e44-6bcc07b40dc8\") " pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.835469 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6a9e4f53-c751-4994-8e44-6bcc07b40dc8-apiservice-cert\") pod \"metallb-operator-webhook-server-fcf969487-8d9dx\" (UID: \"6a9e4f53-c751-4994-8e44-6bcc07b40dc8\") " pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.835514 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6a9e4f53-c751-4994-8e44-6bcc07b40dc8-webhook-cert\") pod \"metallb-operator-webhook-server-fcf969487-8d9dx\" (UID: \"6a9e4f53-c751-4994-8e44-6bcc07b40dc8\") " pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.937217 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6a9e4f53-c751-4994-8e44-6bcc07b40dc8-apiservice-cert\") pod \"metallb-operator-webhook-server-fcf969487-8d9dx\" (UID: \"6a9e4f53-c751-4994-8e44-6bcc07b40dc8\") " pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.937303 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6a9e4f53-c751-4994-8e44-6bcc07b40dc8-webhook-cert\") pod \"metallb-operator-webhook-server-fcf969487-8d9dx\" (UID: \"6a9e4f53-c751-4994-8e44-6bcc07b40dc8\") " pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.937377 4707 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-5d4cf\" (UniqueName: \"kubernetes.io/projected/6a9e4f53-c751-4994-8e44-6bcc07b40dc8-kube-api-access-5d4cf\") pod \"metallb-operator-webhook-server-fcf969487-8d9dx\" (UID: \"6a9e4f53-c751-4994-8e44-6bcc07b40dc8\") " pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.941074 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6a9e4f53-c751-4994-8e44-6bcc07b40dc8-apiservice-cert\") pod \"metallb-operator-webhook-server-fcf969487-8d9dx\" (UID: \"6a9e4f53-c751-4994-8e44-6bcc07b40dc8\") " pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.943634 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6a9e4f53-c751-4994-8e44-6bcc07b40dc8-webhook-cert\") pod \"metallb-operator-webhook-server-fcf969487-8d9dx\" (UID: \"6a9e4f53-c751-4994-8e44-6bcc07b40dc8\") " pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:21 crc kubenswrapper[4707]: I1204 09:51:21.954461 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5d4cf\" (UniqueName: \"kubernetes.io/projected/6a9e4f53-c751-4994-8e44-6bcc07b40dc8-kube-api-access-5d4cf\") pod \"metallb-operator-webhook-server-fcf969487-8d9dx\" (UID: \"6a9e4f53-c751-4994-8e44-6bcc07b40dc8\") " pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:22 crc kubenswrapper[4707]: I1204 09:51:22.114694 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:22 crc kubenswrapper[4707]: I1204 09:51:22.281420 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls"] Dec 04 09:51:22 crc kubenswrapper[4707]: I1204 09:51:22.616299 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" event={"ID":"41bddb2a-8c9f-42a0-a450-06a9e755c211","Type":"ContainerStarted","Data":"bef9cdfda629583c3c1d1e6d4f776bdd2b6dd23a39dd612546d3db465b471eb6"} Dec 04 09:51:22 crc kubenswrapper[4707]: I1204 09:51:22.758919 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx"] Dec 04 09:51:22 crc kubenswrapper[4707]: W1204 09:51:22.759413 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a9e4f53_c751_4994_8e44_6bcc07b40dc8.slice/crio-178e8df52fe859266a904b7e65b62107b832edb7bfaa0745c6f9779e5d8bcf0e WatchSource:0}: Error finding container 178e8df52fe859266a904b7e65b62107b832edb7bfaa0745c6f9779e5d8bcf0e: Status 404 returned error can't find the container with id 178e8df52fe859266a904b7e65b62107b832edb7bfaa0745c6f9779e5d8bcf0e Dec 04 09:51:23 crc kubenswrapper[4707]: I1204 09:51:23.622088 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" event={"ID":"6a9e4f53-c751-4994-8e44-6bcc07b40dc8","Type":"ContainerStarted","Data":"178e8df52fe859266a904b7e65b62107b832edb7bfaa0745c6f9779e5d8bcf0e"} Dec 04 09:51:26 crc kubenswrapper[4707]: I1204 09:51:26.824498 4707 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:26 crc kubenswrapper[4707]: I1204 09:51:26.876093 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:27 crc kubenswrapper[4707]: I1204 09:51:27.084051 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b6v7l"] Dec 04 09:51:28 crc kubenswrapper[4707]: I1204 09:51:28.651361 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b6v7l" podUID="afc98ce4-a504-4788-8060-c74df9c37a76" containerName="registry-server" containerID="cri-o://e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788" gracePeriod=2 Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.274066 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.340746 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcvks\" (UniqueName: \"kubernetes.io/projected/afc98ce4-a504-4788-8060-c74df9c37a76-kube-api-access-qcvks\") pod \"afc98ce4-a504-4788-8060-c74df9c37a76\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.340800 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-utilities\") pod \"afc98ce4-a504-4788-8060-c74df9c37a76\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.340827 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-catalog-content\") pod \"afc98ce4-a504-4788-8060-c74df9c37a76\" (UID: \"afc98ce4-a504-4788-8060-c74df9c37a76\") " Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.345228 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-utilities" (OuterVolumeSpecName: "utilities") pod "afc98ce4-a504-4788-8060-c74df9c37a76" (UID: "afc98ce4-a504-4788-8060-c74df9c37a76"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.348928 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afc98ce4-a504-4788-8060-c74df9c37a76-kube-api-access-qcvks" (OuterVolumeSpecName: "kube-api-access-qcvks") pod "afc98ce4-a504-4788-8060-c74df9c37a76" (UID: "afc98ce4-a504-4788-8060-c74df9c37a76"). InnerVolumeSpecName "kube-api-access-qcvks". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.442823 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcvks\" (UniqueName: \"kubernetes.io/projected/afc98ce4-a504-4788-8060-c74df9c37a76-kube-api-access-qcvks\") on node \"crc\" DevicePath \"\"" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.442856 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.444414 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "afc98ce4-a504-4788-8060-c74df9c37a76" (UID: "afc98ce4-a504-4788-8060-c74df9c37a76"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.544106 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afc98ce4-a504-4788-8060-c74df9c37a76-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.657679 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" event={"ID":"41bddb2a-8c9f-42a0-a450-06a9e755c211","Type":"ContainerStarted","Data":"80dc513d0c402eed0200cd9f0b337965aca402e9c242fab1003dd630f1c537d7"} Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.657765 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.659751 4707 generic.go:334] "Generic (PLEG): container finished" podID="afc98ce4-a504-4788-8060-c74df9c37a76" containerID="e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788" exitCode=0 Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.659828 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6v7l" event={"ID":"afc98ce4-a504-4788-8060-c74df9c37a76","Type":"ContainerDied","Data":"e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788"} Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.659833 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b6v7l" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.659863 4707 scope.go:117] "RemoveContainer" containerID="e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.659853 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6v7l" event={"ID":"afc98ce4-a504-4788-8060-c74df9c37a76","Type":"ContainerDied","Data":"bf3592a66358b13e7418b7f52b85f6430617f9cae662a8f836770b42aabcf20b"} Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.661378 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" event={"ID":"6a9e4f53-c751-4994-8e44-6bcc07b40dc8","Type":"ContainerStarted","Data":"920b6232a73a3a04b9c75c714ea35a56593bd485f56826ff28df5156ef0e4dcd"} Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.661749 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.681136 4707 scope.go:117] "RemoveContainer" containerID="d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.681375 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" podStartSLOduration=2.039217667 podStartE2EDuration="8.6813525s" podCreationTimestamp="2025-12-04 09:51:21 +0000 UTC" firstStartedPulling="2025-12-04 09:51:22.301052196 +0000 UTC m=+781.736874703" lastFinishedPulling="2025-12-04 09:51:28.943187019 +0000 UTC m=+788.379009536" observedRunningTime="2025-12-04 09:51:29.676848054 +0000 UTC m=+789.112670581" watchObservedRunningTime="2025-12-04 09:51:29.6813525 +0000 UTC m=+789.117175007" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.693392 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b6v7l"] Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.698240 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b6v7l"] Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.701247 4707 scope.go:117] "RemoveContainer" containerID="af0ea570a60928af92d85c1572865f1243e60415865fad4d23029a74ea5e8406" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.714046 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" podStartSLOduration=2.51589695 podStartE2EDuration="8.714024027s" podCreationTimestamp="2025-12-04 09:51:21 +0000 UTC" firstStartedPulling="2025-12-04 09:51:22.762591238 +0000 UTC m=+782.198413745" lastFinishedPulling="2025-12-04 09:51:28.960718315 +0000 UTC m=+788.396540822" observedRunningTime="2025-12-04 09:51:29.713106657 +0000 UTC m=+789.148929164" watchObservedRunningTime="2025-12-04 09:51:29.714024027 +0000 UTC m=+789.149846534" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.717440 4707 scope.go:117] "RemoveContainer" containerID="e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788" Dec 04 09:51:29 crc kubenswrapper[4707]: E1204 09:51:29.717982 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788\": container 
with ID starting with e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788 not found: ID does not exist" containerID="e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.718031 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788"} err="failed to get container status \"e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788\": rpc error: code = NotFound desc = could not find container \"e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788\": container with ID starting with e5091ba17831f4d0da61e83d4b9c43750e26788fa035538000e6819063b67788 not found: ID does not exist" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.718061 4707 scope.go:117] "RemoveContainer" containerID="d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49" Dec 04 09:51:29 crc kubenswrapper[4707]: E1204 09:51:29.718380 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49\": container with ID starting with d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49 not found: ID does not exist" containerID="d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.718409 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49"} err="failed to get container status \"d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49\": rpc error: code = NotFound desc = could not find container \"d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49\": container with ID starting with d0679b33432c02824229e705c7a7f9db69198347e7a9ca7581d013816df69f49 not found: ID does not exist" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.718430 4707 scope.go:117] "RemoveContainer" containerID="af0ea570a60928af92d85c1572865f1243e60415865fad4d23029a74ea5e8406" Dec 04 09:51:29 crc kubenswrapper[4707]: E1204 09:51:29.719167 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af0ea570a60928af92d85c1572865f1243e60415865fad4d23029a74ea5e8406\": container with ID starting with af0ea570a60928af92d85c1572865f1243e60415865fad4d23029a74ea5e8406 not found: ID does not exist" containerID="af0ea570a60928af92d85c1572865f1243e60415865fad4d23029a74ea5e8406" Dec 04 09:51:29 crc kubenswrapper[4707]: I1204 09:51:29.719190 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af0ea570a60928af92d85c1572865f1243e60415865fad4d23029a74ea5e8406"} err="failed to get container status \"af0ea570a60928af92d85c1572865f1243e60415865fad4d23029a74ea5e8406\": rpc error: code = NotFound desc = could not find container \"af0ea570a60928af92d85c1572865f1243e60415865fad4d23029a74ea5e8406\": container with ID starting with af0ea570a60928af92d85c1572865f1243e60415865fad4d23029a74ea5e8406 not found: ID does not exist" Dec 04 09:51:30 crc kubenswrapper[4707]: I1204 09:51:30.817067 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:51:30 crc kubenswrapper[4707]: I1204 09:51:30.817245 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:51:30 crc kubenswrapper[4707]: I1204 09:51:30.850980 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afc98ce4-a504-4788-8060-c74df9c37a76" path="/var/lib/kubelet/pods/afc98ce4-a504-4788-8060-c74df9c37a76/volumes" Dec 04 09:51:42 crc kubenswrapper[4707]: I1204 09:51:42.122830 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-fcf969487-8d9dx" Dec 04 09:52:00 crc kubenswrapper[4707]: I1204 09:52:00.816704 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:52:00 crc kubenswrapper[4707]: I1204 09:52:00.817246 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:52:01 crc kubenswrapper[4707]: I1204 09:52:01.725974 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5657775d6b-zhwls" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.518460 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-r9wp9"] Dec 04 09:52:02 crc kubenswrapper[4707]: E1204 09:52:02.518721 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc98ce4-a504-4788-8060-c74df9c37a76" containerName="registry-server" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.518738 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc98ce4-a504-4788-8060-c74df9c37a76" containerName="registry-server" Dec 04 09:52:02 crc kubenswrapper[4707]: E1204 09:52:02.518768 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc98ce4-a504-4788-8060-c74df9c37a76" containerName="extract-utilities" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.518777 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc98ce4-a504-4788-8060-c74df9c37a76" containerName="extract-utilities" Dec 04 09:52:02 crc kubenswrapper[4707]: E1204 09:52:02.518788 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc98ce4-a504-4788-8060-c74df9c37a76" containerName="extract-content" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.518797 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc98ce4-a504-4788-8060-c74df9c37a76" containerName="extract-content" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.518916 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="afc98ce4-a504-4788-8060-c74df9c37a76" containerName="registry-server" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.521094 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.524918 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.524934 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-jfs5t" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.525033 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.530957 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn"] Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.531759 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.533942 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.546618 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn"] Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.563272 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/71b37f42-f459-4829-8112-3db6b09fc06d-frr-startup\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.563325 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-metrics\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.563607 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/31fd648a-f639-45c0-a30c-77afc9cafedc-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-vxncn\" (UID: \"31fd648a-f639-45c0-a30c-77afc9cafedc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.563656 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-frr-conf\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.563678 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/71b37f42-f459-4829-8112-3db6b09fc06d-metrics-certs\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.563710 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-frr-sockets\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " 
pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.563866 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhdvp\" (UniqueName: \"kubernetes.io/projected/31fd648a-f639-45c0-a30c-77afc9cafedc-kube-api-access-nhdvp\") pod \"frr-k8s-webhook-server-7fcb986d4-vxncn\" (UID: \"31fd648a-f639-45c0-a30c-77afc9cafedc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.563953 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-reloader\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.563992 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfwfl\" (UniqueName: \"kubernetes.io/projected/71b37f42-f459-4829-8112-3db6b09fc06d-kube-api-access-mfwfl\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.609084 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-599tz"] Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.609986 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-599tz" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.611685 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.611713 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.611768 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.612225 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-rs6tg" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.624568 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-kv6jh"] Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.625618 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.632205 4707 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.647520 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-kv6jh"] Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.664665 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/52f06bc6-db65-4283-961c-3bee70be7363-metallb-excludel2\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.664714 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/31fd648a-f639-45c0-a30c-77afc9cafedc-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-vxncn\" (UID: \"31fd648a-f639-45c0-a30c-77afc9cafedc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.664734 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx86k\" (UniqueName: \"kubernetes.io/projected/52f06bc6-db65-4283-961c-3bee70be7363-kube-api-access-hx86k\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.664798 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-metrics-certs\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.664827 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-frr-conf\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.664846 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/71b37f42-f459-4829-8112-3db6b09fc06d-metrics-certs\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.664870 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/058a219f-7ca4-486e-87e2-7406ad069250-metrics-certs\") pod \"controller-f8648f98b-kv6jh\" (UID: \"058a219f-7ca4-486e-87e2-7406ad069250\") " pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:02 crc kubenswrapper[4707]: E1204 09:52:02.664880 4707 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.664896 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-frr-sockets\") pod \"frr-k8s-r9wp9\" 
(UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: E1204 09:52:02.664961 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/31fd648a-f639-45c0-a30c-77afc9cafedc-cert podName:31fd648a-f639-45c0-a30c-77afc9cafedc nodeName:}" failed. No retries permitted until 2025-12-04 09:52:03.164940866 +0000 UTC m=+822.600763373 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/31fd648a-f639-45c0-a30c-77afc9cafedc-cert") pod "frr-k8s-webhook-server-7fcb986d4-vxncn" (UID: "31fd648a-f639-45c0-a30c-77afc9cafedc") : secret "frr-k8s-webhook-server-cert" not found Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665010 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmqs7\" (UniqueName: \"kubernetes.io/projected/058a219f-7ca4-486e-87e2-7406ad069250-kube-api-access-gmqs7\") pod \"controller-f8648f98b-kv6jh\" (UID: \"058a219f-7ca4-486e-87e2-7406ad069250\") " pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665070 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-memberlist\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665133 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhdvp\" (UniqueName: \"kubernetes.io/projected/31fd648a-f639-45c0-a30c-77afc9cafedc-kube-api-access-nhdvp\") pod \"frr-k8s-webhook-server-7fcb986d4-vxncn\" (UID: \"31fd648a-f639-45c0-a30c-77afc9cafedc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665189 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-reloader\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665226 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfwfl\" (UniqueName: \"kubernetes.io/projected/71b37f42-f459-4829-8112-3db6b09fc06d-kube-api-access-mfwfl\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665272 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/71b37f42-f459-4829-8112-3db6b09fc06d-frr-startup\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665298 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-metrics\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665323 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" 
(UniqueName: \"kubernetes.io/secret/058a219f-7ca4-486e-87e2-7406ad069250-cert\") pod \"controller-f8648f98b-kv6jh\" (UID: \"058a219f-7ca4-486e-87e2-7406ad069250\") " pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665367 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-frr-conf\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665474 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-frr-sockets\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665718 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-reloader\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.665781 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/71b37f42-f459-4829-8112-3db6b09fc06d-metrics\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.666287 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/71b37f42-f459-4829-8112-3db6b09fc06d-frr-startup\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.678096 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/71b37f42-f459-4829-8112-3db6b09fc06d-metrics-certs\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.682197 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhdvp\" (UniqueName: \"kubernetes.io/projected/31fd648a-f639-45c0-a30c-77afc9cafedc-kube-api-access-nhdvp\") pod \"frr-k8s-webhook-server-7fcb986d4-vxncn\" (UID: \"31fd648a-f639-45c0-a30c-77afc9cafedc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.688887 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfwfl\" (UniqueName: \"kubernetes.io/projected/71b37f42-f459-4829-8112-3db6b09fc06d-kube-api-access-mfwfl\") pod \"frr-k8s-r9wp9\" (UID: \"71b37f42-f459-4829-8112-3db6b09fc06d\") " pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.766117 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/058a219f-7ca4-486e-87e2-7406ad069250-cert\") pod \"controller-f8648f98b-kv6jh\" (UID: \"058a219f-7ca4-486e-87e2-7406ad069250\") " pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.766198 4707 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/52f06bc6-db65-4283-961c-3bee70be7363-metallb-excludel2\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.766236 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx86k\" (UniqueName: \"kubernetes.io/projected/52f06bc6-db65-4283-961c-3bee70be7363-kube-api-access-hx86k\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.766276 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-metrics-certs\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.766313 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/058a219f-7ca4-486e-87e2-7406ad069250-metrics-certs\") pod \"controller-f8648f98b-kv6jh\" (UID: \"058a219f-7ca4-486e-87e2-7406ad069250\") " pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.766358 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmqs7\" (UniqueName: \"kubernetes.io/projected/058a219f-7ca4-486e-87e2-7406ad069250-kube-api-access-gmqs7\") pod \"controller-f8648f98b-kv6jh\" (UID: \"058a219f-7ca4-486e-87e2-7406ad069250\") " pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.766384 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-memberlist\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:02 crc kubenswrapper[4707]: E1204 09:52:02.766445 4707 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 04 09:52:02 crc kubenswrapper[4707]: E1204 09:52:02.766483 4707 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 04 09:52:02 crc kubenswrapper[4707]: E1204 09:52:02.766507 4707 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 04 09:52:02 crc kubenswrapper[4707]: E1204 09:52:02.766523 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-metrics-certs podName:52f06bc6-db65-4283-961c-3bee70be7363 nodeName:}" failed. No retries permitted until 2025-12-04 09:52:03.266502682 +0000 UTC m=+822.702325289 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-metrics-certs") pod "speaker-599tz" (UID: "52f06bc6-db65-4283-961c-3bee70be7363") : secret "speaker-certs-secret" not found Dec 04 09:52:02 crc kubenswrapper[4707]: E1204 09:52:02.766546 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/058a219f-7ca4-486e-87e2-7406ad069250-metrics-certs podName:058a219f-7ca4-486e-87e2-7406ad069250 nodeName:}" failed. No retries permitted until 2025-12-04 09:52:03.266535654 +0000 UTC m=+822.702358281 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/058a219f-7ca4-486e-87e2-7406ad069250-metrics-certs") pod "controller-f8648f98b-kv6jh" (UID: "058a219f-7ca4-486e-87e2-7406ad069250") : secret "controller-certs-secret" not found Dec 04 09:52:02 crc kubenswrapper[4707]: E1204 09:52:02.766574 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-memberlist podName:52f06bc6-db65-4283-961c-3bee70be7363 nodeName:}" failed. No retries permitted until 2025-12-04 09:52:03.266553084 +0000 UTC m=+822.702375711 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-memberlist") pod "speaker-599tz" (UID: "52f06bc6-db65-4283-961c-3bee70be7363") : secret "metallb-memberlist" not found Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.767174 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/52f06bc6-db65-4283-961c-3bee70be7363-metallb-excludel2\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.769356 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/058a219f-7ca4-486e-87e2-7406ad069250-cert\") pod \"controller-f8648f98b-kv6jh\" (UID: \"058a219f-7ca4-486e-87e2-7406ad069250\") " pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.785891 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx86k\" (UniqueName: \"kubernetes.io/projected/52f06bc6-db65-4283-961c-3bee70be7363-kube-api-access-hx86k\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.793529 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmqs7\" (UniqueName: \"kubernetes.io/projected/058a219f-7ca4-486e-87e2-7406ad069250-kube-api-access-gmqs7\") pod \"controller-f8648f98b-kv6jh\" (UID: \"058a219f-7ca4-486e-87e2-7406ad069250\") " pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:02 crc kubenswrapper[4707]: I1204 09:52:02.836933 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.170650 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/31fd648a-f639-45c0-a30c-77afc9cafedc-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-vxncn\" (UID: \"31fd648a-f639-45c0-a30c-77afc9cafedc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.174470 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/31fd648a-f639-45c0-a30c-77afc9cafedc-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-vxncn\" (UID: \"31fd648a-f639-45c0-a30c-77afc9cafedc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.272136 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-metrics-certs\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.272200 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/058a219f-7ca4-486e-87e2-7406ad069250-metrics-certs\") pod \"controller-f8648f98b-kv6jh\" (UID: \"058a219f-7ca4-486e-87e2-7406ad069250\") " pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.272238 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-memberlist\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:03 crc kubenswrapper[4707]: E1204 09:52:03.272436 4707 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 04 09:52:03 crc kubenswrapper[4707]: E1204 09:52:03.272494 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-memberlist podName:52f06bc6-db65-4283-961c-3bee70be7363 nodeName:}" failed. No retries permitted until 2025-12-04 09:52:04.272476752 +0000 UTC m=+823.708299259 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-memberlist") pod "speaker-599tz" (UID: "52f06bc6-db65-4283-961c-3bee70be7363") : secret "metallb-memberlist" not found Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.276614 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-metrics-certs\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.276783 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/058a219f-7ca4-486e-87e2-7406ad069250-metrics-certs\") pod \"controller-f8648f98b-kv6jh\" (UID: \"058a219f-7ca4-486e-87e2-7406ad069250\") " pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.447054 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.539252 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.650204 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn"] Dec 04 09:52:03 crc kubenswrapper[4707]: W1204 09:52:03.656169 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31fd648a_f639_45c0_a30c_77afc9cafedc.slice/crio-1e93fcb83d9b80d207948885f188b694292443bab929c88192f0684150d0a745 WatchSource:0}: Error finding container 1e93fcb83d9b80d207948885f188b694292443bab929c88192f0684150d0a745: Status 404 returned error can't find the container with id 1e93fcb83d9b80d207948885f188b694292443bab929c88192f0684150d0a745 Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.721707 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-kv6jh"] Dec 04 09:52:03 crc kubenswrapper[4707]: W1204 09:52:03.731658 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod058a219f_7ca4_486e_87e2_7406ad069250.slice/crio-38efe79bbc8458f963f42e920d7c9eaa0130709d11fd2c5894e70e5fd5109d11 WatchSource:0}: Error finding container 38efe79bbc8458f963f42e920d7c9eaa0130709d11fd2c5894e70e5fd5109d11: Status 404 returned error can't find the container with id 38efe79bbc8458f963f42e920d7c9eaa0130709d11fd2c5894e70e5fd5109d11 Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.840011 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-kv6jh" event={"ID":"058a219f-7ca4-486e-87e2-7406ad069250","Type":"ContainerStarted","Data":"38efe79bbc8458f963f42e920d7c9eaa0130709d11fd2c5894e70e5fd5109d11"} Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.841509 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" event={"ID":"31fd648a-f639-45c0-a30c-77afc9cafedc","Type":"ContainerStarted","Data":"1e93fcb83d9b80d207948885f188b694292443bab929c88192f0684150d0a745"} Dec 04 09:52:03 crc kubenswrapper[4707]: I1204 09:52:03.842508 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-r9wp9" event={"ID":"71b37f42-f459-4829-8112-3db6b09fc06d","Type":"ContainerStarted","Data":"d932c7a1ca05bf17d8515ee6589fe722ae32e4a80e7d899d7e5df3fc58857e1f"} Dec 04 09:52:04 crc kubenswrapper[4707]: I1204 09:52:04.285721 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-memberlist\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:04 crc kubenswrapper[4707]: I1204 09:52:04.301747 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/52f06bc6-db65-4283-961c-3bee70be7363-memberlist\") pod \"speaker-599tz\" (UID: \"52f06bc6-db65-4283-961c-3bee70be7363\") " pod="metallb-system/speaker-599tz" Dec 04 09:52:04 crc kubenswrapper[4707]: I1204 09:52:04.427036 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-599tz" Dec 04 09:52:04 crc kubenswrapper[4707]: W1204 09:52:04.446845 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52f06bc6_db65_4283_961c_3bee70be7363.slice/crio-ffec2137c8860bf3c3a5dfdd4f2ad0cd5254cc907909d88c627b4002d064b060 WatchSource:0}: Error finding container ffec2137c8860bf3c3a5dfdd4f2ad0cd5254cc907909d88c627b4002d064b060: Status 404 returned error can't find the container with id ffec2137c8860bf3c3a5dfdd4f2ad0cd5254cc907909d88c627b4002d064b060 Dec 04 09:52:04 crc kubenswrapper[4707]: I1204 09:52:04.862452 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-599tz" event={"ID":"52f06bc6-db65-4283-961c-3bee70be7363","Type":"ContainerStarted","Data":"d3efbedae611c14915ecd01ae67d2fabe2251c637e9f651d075536308dcce10a"} Dec 04 09:52:04 crc kubenswrapper[4707]: I1204 09:52:04.862821 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-599tz" event={"ID":"52f06bc6-db65-4283-961c-3bee70be7363","Type":"ContainerStarted","Data":"ffec2137c8860bf3c3a5dfdd4f2ad0cd5254cc907909d88c627b4002d064b060"} Dec 04 09:52:04 crc kubenswrapper[4707]: I1204 09:52:04.874520 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-kv6jh" event={"ID":"058a219f-7ca4-486e-87e2-7406ad069250","Type":"ContainerStarted","Data":"d84b330d9d6c730e07f0d7afa7a7934c27191f66f902408f53780fb8c65db0fc"} Dec 04 09:52:07 crc kubenswrapper[4707]: I1204 09:52:07.922550 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-kv6jh" event={"ID":"058a219f-7ca4-486e-87e2-7406ad069250","Type":"ContainerStarted","Data":"35744abf7b11cd03334f01291084a13f5083e3e3e1eea70cee1e2a7fbee1f80a"} Dec 04 09:52:07 crc kubenswrapper[4707]: I1204 09:52:07.923175 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:07 crc kubenswrapper[4707]: I1204 09:52:07.925951 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-599tz" event={"ID":"52f06bc6-db65-4283-961c-3bee70be7363","Type":"ContainerStarted","Data":"e019f6b86a9887758646b079ba908ea846ec857cced8db21eaf33c58d9a9cafe"} Dec 04 09:52:07 crc kubenswrapper[4707]: I1204 09:52:07.926136 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-599tz" Dec 04 09:52:07 crc kubenswrapper[4707]: I1204 
09:52:07.943001 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-kv6jh" podStartSLOduration=2.389404698 podStartE2EDuration="5.942986627s" podCreationTimestamp="2025-12-04 09:52:02 +0000 UTC" firstStartedPulling="2025-12-04 09:52:03.854059908 +0000 UTC m=+823.289882415" lastFinishedPulling="2025-12-04 09:52:07.407641837 +0000 UTC m=+826.843464344" observedRunningTime="2025-12-04 09:52:07.940346512 +0000 UTC m=+827.376169029" watchObservedRunningTime="2025-12-04 09:52:07.942986627 +0000 UTC m=+827.378809134" Dec 04 09:52:07 crc kubenswrapper[4707]: I1204 09:52:07.962863 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-599tz" podStartSLOduration=3.27777595 podStartE2EDuration="5.96284476s" podCreationTimestamp="2025-12-04 09:52:02 +0000 UTC" firstStartedPulling="2025-12-04 09:52:04.712427759 +0000 UTC m=+824.148250266" lastFinishedPulling="2025-12-04 09:52:07.397496569 +0000 UTC m=+826.833319076" observedRunningTime="2025-12-04 09:52:07.960895556 +0000 UTC m=+827.396718083" watchObservedRunningTime="2025-12-04 09:52:07.96284476 +0000 UTC m=+827.398667267" Dec 04 09:52:11 crc kubenswrapper[4707]: I1204 09:52:11.958525 4707 generic.go:334] "Generic (PLEG): container finished" podID="71b37f42-f459-4829-8112-3db6b09fc06d" containerID="fdc52ed2b9ed9f795d9dccd11a495a924b4bfb6b143bba852e3c72768878905f" exitCode=0 Dec 04 09:52:11 crc kubenswrapper[4707]: I1204 09:52:11.958625 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r9wp9" event={"ID":"71b37f42-f459-4829-8112-3db6b09fc06d","Type":"ContainerDied","Data":"fdc52ed2b9ed9f795d9dccd11a495a924b4bfb6b143bba852e3c72768878905f"} Dec 04 09:52:11 crc kubenswrapper[4707]: I1204 09:52:11.962026 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" event={"ID":"31fd648a-f639-45c0-a30c-77afc9cafedc","Type":"ContainerStarted","Data":"fb64d2a78f607fbf1db9263ebbe6f1fed62041d7dbeb9591563f4f8ebd68533d"} Dec 04 09:52:11 crc kubenswrapper[4707]: I1204 09:52:11.962192 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" Dec 04 09:52:12 crc kubenswrapper[4707]: I1204 09:52:11.999790 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" podStartSLOduration=1.903399074 podStartE2EDuration="9.999768046s" podCreationTimestamp="2025-12-04 09:52:02 +0000 UTC" firstStartedPulling="2025-12-04 09:52:03.658299084 +0000 UTC m=+823.094121591" lastFinishedPulling="2025-12-04 09:52:11.754668056 +0000 UTC m=+831.190490563" observedRunningTime="2025-12-04 09:52:11.99556904 +0000 UTC m=+831.431391557" watchObservedRunningTime="2025-12-04 09:52:11.999768046 +0000 UTC m=+831.435590553" Dec 04 09:52:12 crc kubenswrapper[4707]: I1204 09:52:12.970228 4707 generic.go:334] "Generic (PLEG): container finished" podID="71b37f42-f459-4829-8112-3db6b09fc06d" containerID="d16a7298c02df5ca3d50603a76ef93af676b1ab47ff0ff3c778916410f1b6344" exitCode=0 Dec 04 09:52:12 crc kubenswrapper[4707]: I1204 09:52:12.970259 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r9wp9" event={"ID":"71b37f42-f459-4829-8112-3db6b09fc06d","Type":"ContainerDied","Data":"d16a7298c02df5ca3d50603a76ef93af676b1ab47ff0ff3c778916410f1b6344"} Dec 04 09:52:13 crc kubenswrapper[4707]: I1204 09:52:13.543088 4707 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-kv6jh" Dec 04 09:52:13 crc kubenswrapper[4707]: I1204 09:52:13.979495 4707 generic.go:334] "Generic (PLEG): container finished" podID="71b37f42-f459-4829-8112-3db6b09fc06d" containerID="fe9ebad059b3ad1ee69fa5896bfcc273282ae669d363ac160aafc086d6d232f8" exitCode=0 Dec 04 09:52:13 crc kubenswrapper[4707]: I1204 09:52:13.979529 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r9wp9" event={"ID":"71b37f42-f459-4829-8112-3db6b09fc06d","Type":"ContainerDied","Data":"fe9ebad059b3ad1ee69fa5896bfcc273282ae669d363ac160aafc086d6d232f8"} Dec 04 09:52:14 crc kubenswrapper[4707]: I1204 09:52:14.432163 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-599tz" Dec 04 09:52:14 crc kubenswrapper[4707]: I1204 09:52:14.993586 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r9wp9" event={"ID":"71b37f42-f459-4829-8112-3db6b09fc06d","Type":"ContainerStarted","Data":"430c4870f4ca3d108a96b855f218e7b360ed62c86fbd67ecb77fdaab50c3ecfd"} Dec 04 09:52:14 crc kubenswrapper[4707]: I1204 09:52:14.993634 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r9wp9" event={"ID":"71b37f42-f459-4829-8112-3db6b09fc06d","Type":"ContainerStarted","Data":"466870c91d065db4b8e85d6d7da0954f2a67e9b2188441622d95e293d1c3ea78"} Dec 04 09:52:14 crc kubenswrapper[4707]: I1204 09:52:14.993645 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r9wp9" event={"ID":"71b37f42-f459-4829-8112-3db6b09fc06d","Type":"ContainerStarted","Data":"30f49b6b1fef3b11e97952cc8771aac2a6b9e3c7547fc701f1fe1ee383168405"} Dec 04 09:52:14 crc kubenswrapper[4707]: I1204 09:52:14.993655 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r9wp9" event={"ID":"71b37f42-f459-4829-8112-3db6b09fc06d","Type":"ContainerStarted","Data":"7f8cafe947a380c6b487d2035dc5699bd55e924966faba9c6e460cff01c609a8"} Dec 04 09:52:14 crc kubenswrapper[4707]: I1204 09:52:14.993668 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r9wp9" event={"ID":"71b37f42-f459-4829-8112-3db6b09fc06d","Type":"ContainerStarted","Data":"77b9a1cfceba3fdc1cc8e6ec05fb47767c34bb0cabe943c29c4483d448802cae"} Dec 04 09:52:14 crc kubenswrapper[4707]: I1204 09:52:14.993680 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r9wp9" event={"ID":"71b37f42-f459-4829-8112-3db6b09fc06d","Type":"ContainerStarted","Data":"7a36d1be1643312305bac4c082f96381f5ae7043772d228dddad149aeb5cd9a5"} Dec 04 09:52:14 crc kubenswrapper[4707]: I1204 09:52:14.993765 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:15 crc kubenswrapper[4707]: I1204 09:52:15.026419 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-r9wp9" podStartSLOduration=4.65448267 podStartE2EDuration="13.026397576s" podCreationTimestamp="2025-12-04 09:52:02 +0000 UTC" firstStartedPulling="2025-12-04 09:52:03.401202737 +0000 UTC m=+822.837025244" lastFinishedPulling="2025-12-04 09:52:11.773117643 +0000 UTC m=+831.208940150" observedRunningTime="2025-12-04 09:52:15.025816197 +0000 UTC m=+834.461638704" watchObservedRunningTime="2025-12-04 09:52:15.026397576 +0000 UTC m=+834.462220103" Dec 04 09:52:17 crc kubenswrapper[4707]: I1204 09:52:17.837722 4707 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:17 crc kubenswrapper[4707]: I1204 09:52:17.876700 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:21 crc kubenswrapper[4707]: I1204 09:52:21.537001 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-pj2q6"] Dec 04 09:52:21 crc kubenswrapper[4707]: I1204 09:52:21.538889 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-pj2q6" Dec 04 09:52:21 crc kubenswrapper[4707]: I1204 09:52:21.552472 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-pj2q6"] Dec 04 09:52:21 crc kubenswrapper[4707]: I1204 09:52:21.553512 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 04 09:52:21 crc kubenswrapper[4707]: I1204 09:52:21.553515 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-6l2wm" Dec 04 09:52:21 crc kubenswrapper[4707]: I1204 09:52:21.555187 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 04 09:52:21 crc kubenswrapper[4707]: I1204 09:52:21.576651 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpchm\" (UniqueName: \"kubernetes.io/projected/b873bc48-23e0-43b9-a348-28ff090b9d90-kube-api-access-kpchm\") pod \"mariadb-operator-index-pj2q6\" (UID: \"b873bc48-23e0-43b9-a348-28ff090b9d90\") " pod="openstack-operators/mariadb-operator-index-pj2q6" Dec 04 09:52:21 crc kubenswrapper[4707]: I1204 09:52:21.677681 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpchm\" (UniqueName: \"kubernetes.io/projected/b873bc48-23e0-43b9-a348-28ff090b9d90-kube-api-access-kpchm\") pod \"mariadb-operator-index-pj2q6\" (UID: \"b873bc48-23e0-43b9-a348-28ff090b9d90\") " pod="openstack-operators/mariadb-operator-index-pj2q6" Dec 04 09:52:21 crc kubenswrapper[4707]: I1204 09:52:21.698293 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpchm\" (UniqueName: \"kubernetes.io/projected/b873bc48-23e0-43b9-a348-28ff090b9d90-kube-api-access-kpchm\") pod \"mariadb-operator-index-pj2q6\" (UID: \"b873bc48-23e0-43b9-a348-28ff090b9d90\") " pod="openstack-operators/mariadb-operator-index-pj2q6" Dec 04 09:52:21 crc kubenswrapper[4707]: I1204 09:52:21.873670 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-pj2q6" Dec 04 09:52:22 crc kubenswrapper[4707]: I1204 09:52:22.313755 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-pj2q6"] Dec 04 09:52:22 crc kubenswrapper[4707]: W1204 09:52:22.323958 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb873bc48_23e0_43b9_a348_28ff090b9d90.slice/crio-cb8f4101bd9cf9cfbffe2b79b80cf7a668829da9943a091ed2ad93fdea9b4428 WatchSource:0}: Error finding container cb8f4101bd9cf9cfbffe2b79b80cf7a668829da9943a091ed2ad93fdea9b4428: Status 404 returned error can't find the container with id cb8f4101bd9cf9cfbffe2b79b80cf7a668829da9943a091ed2ad93fdea9b4428 Dec 04 09:52:23 crc kubenswrapper[4707]: I1204 09:52:23.035953 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pj2q6" event={"ID":"b873bc48-23e0-43b9-a348-28ff090b9d90","Type":"ContainerStarted","Data":"cb8f4101bd9cf9cfbffe2b79b80cf7a668829da9943a091ed2ad93fdea9b4428"} Dec 04 09:52:23 crc kubenswrapper[4707]: I1204 09:52:23.454896 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-vxncn" Dec 04 09:52:24 crc kubenswrapper[4707]: I1204 09:52:24.042460 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pj2q6" event={"ID":"b873bc48-23e0-43b9-a348-28ff090b9d90","Type":"ContainerStarted","Data":"4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72"} Dec 04 09:52:24 crc kubenswrapper[4707]: I1204 09:52:24.059106 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-pj2q6" podStartSLOduration=2.2758669 podStartE2EDuration="3.05908658s" podCreationTimestamp="2025-12-04 09:52:21 +0000 UTC" firstStartedPulling="2025-12-04 09:52:22.326059721 +0000 UTC m=+841.761882228" lastFinishedPulling="2025-12-04 09:52:23.109279401 +0000 UTC m=+842.545101908" observedRunningTime="2025-12-04 09:52:24.057482298 +0000 UTC m=+843.493304825" watchObservedRunningTime="2025-12-04 09:52:24.05908658 +0000 UTC m=+843.494909087" Dec 04 09:52:24 crc kubenswrapper[4707]: I1204 09:52:24.912246 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-pj2q6"] Dec 04 09:52:25 crc kubenswrapper[4707]: I1204 09:52:25.515126 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-rtkfp"] Dec 04 09:52:25 crc kubenswrapper[4707]: I1204 09:52:25.516096 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-rtkfp" Dec 04 09:52:25 crc kubenswrapper[4707]: I1204 09:52:25.523440 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-rtkfp"] Dec 04 09:52:25 crc kubenswrapper[4707]: I1204 09:52:25.622352 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwtcw\" (UniqueName: \"kubernetes.io/projected/f096c710-2d6f-4a74-b437-6557a3d009a4-kube-api-access-wwtcw\") pod \"mariadb-operator-index-rtkfp\" (UID: \"f096c710-2d6f-4a74-b437-6557a3d009a4\") " pod="openstack-operators/mariadb-operator-index-rtkfp" Dec 04 09:52:25 crc kubenswrapper[4707]: I1204 09:52:25.723835 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwtcw\" (UniqueName: \"kubernetes.io/projected/f096c710-2d6f-4a74-b437-6557a3d009a4-kube-api-access-wwtcw\") pod \"mariadb-operator-index-rtkfp\" (UID: \"f096c710-2d6f-4a74-b437-6557a3d009a4\") " pod="openstack-operators/mariadb-operator-index-rtkfp" Dec 04 09:52:25 crc kubenswrapper[4707]: I1204 09:52:25.743177 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwtcw\" (UniqueName: \"kubernetes.io/projected/f096c710-2d6f-4a74-b437-6557a3d009a4-kube-api-access-wwtcw\") pod \"mariadb-operator-index-rtkfp\" (UID: \"f096c710-2d6f-4a74-b437-6557a3d009a4\") " pod="openstack-operators/mariadb-operator-index-rtkfp" Dec 04 09:52:25 crc kubenswrapper[4707]: I1204 09:52:25.880565 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-rtkfp" Dec 04 09:52:26 crc kubenswrapper[4707]: I1204 09:52:26.052101 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-pj2q6" podUID="b873bc48-23e0-43b9-a348-28ff090b9d90" containerName="registry-server" containerID="cri-o://4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72" gracePeriod=2 Dec 04 09:52:26 crc kubenswrapper[4707]: I1204 09:52:26.308123 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-rtkfp"] Dec 04 09:52:26 crc kubenswrapper[4707]: W1204 09:52:26.310897 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf096c710_2d6f_4a74_b437_6557a3d009a4.slice/crio-a4d9b8efe9aa4533e4c09c6c175b25d59e833e2c64c75fd1d3527f2f8aaf96a8 WatchSource:0}: Error finding container a4d9b8efe9aa4533e4c09c6c175b25d59e833e2c64c75fd1d3527f2f8aaf96a8: Status 404 returned error can't find the container with id a4d9b8efe9aa4533e4c09c6c175b25d59e833e2c64c75fd1d3527f2f8aaf96a8 Dec 04 09:52:26 crc kubenswrapper[4707]: I1204 09:52:26.372627 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-pj2q6" Dec 04 09:52:26 crc kubenswrapper[4707]: I1204 09:52:26.537953 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpchm\" (UniqueName: \"kubernetes.io/projected/b873bc48-23e0-43b9-a348-28ff090b9d90-kube-api-access-kpchm\") pod \"b873bc48-23e0-43b9-a348-28ff090b9d90\" (UID: \"b873bc48-23e0-43b9-a348-28ff090b9d90\") " Dec 04 09:52:26 crc kubenswrapper[4707]: I1204 09:52:26.541372 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b873bc48-23e0-43b9-a348-28ff090b9d90-kube-api-access-kpchm" (OuterVolumeSpecName: "kube-api-access-kpchm") pod "b873bc48-23e0-43b9-a348-28ff090b9d90" (UID: "b873bc48-23e0-43b9-a348-28ff090b9d90"). InnerVolumeSpecName "kube-api-access-kpchm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:52:26 crc kubenswrapper[4707]: I1204 09:52:26.639557 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpchm\" (UniqueName: \"kubernetes.io/projected/b873bc48-23e0-43b9-a348-28ff090b9d90-kube-api-access-kpchm\") on node \"crc\" DevicePath \"\"" Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.058803 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-rtkfp" event={"ID":"f096c710-2d6f-4a74-b437-6557a3d009a4","Type":"ContainerStarted","Data":"b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687"} Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.059114 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-rtkfp" event={"ID":"f096c710-2d6f-4a74-b437-6557a3d009a4","Type":"ContainerStarted","Data":"a4d9b8efe9aa4533e4c09c6c175b25d59e833e2c64c75fd1d3527f2f8aaf96a8"} Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.060859 4707 generic.go:334] "Generic (PLEG): container finished" podID="b873bc48-23e0-43b9-a348-28ff090b9d90" containerID="4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72" exitCode=0 Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.060913 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pj2q6" event={"ID":"b873bc48-23e0-43b9-a348-28ff090b9d90","Type":"ContainerDied","Data":"4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72"} Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.060943 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pj2q6" event={"ID":"b873bc48-23e0-43b9-a348-28ff090b9d90","Type":"ContainerDied","Data":"cb8f4101bd9cf9cfbffe2b79b80cf7a668829da9943a091ed2ad93fdea9b4428"} Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.060964 4707 scope.go:117] "RemoveContainer" containerID="4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72" Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.061086 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-pj2q6" Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.073077 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-rtkfp" podStartSLOduration=1.5858913650000002 podStartE2EDuration="2.073059406s" podCreationTimestamp="2025-12-04 09:52:25 +0000 UTC" firstStartedPulling="2025-12-04 09:52:26.314744631 +0000 UTC m=+845.750567148" lastFinishedPulling="2025-12-04 09:52:26.801912682 +0000 UTC m=+846.237735189" observedRunningTime="2025-12-04 09:52:27.072477238 +0000 UTC m=+846.508299745" watchObservedRunningTime="2025-12-04 09:52:27.073059406 +0000 UTC m=+846.508881913" Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.074772 4707 scope.go:117] "RemoveContainer" containerID="4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72" Dec 04 09:52:27 crc kubenswrapper[4707]: E1204 09:52:27.075274 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72\": container with ID starting with 4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72 not found: ID does not exist" containerID="4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72" Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.075308 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72"} err="failed to get container status \"4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72\": rpc error: code = NotFound desc = could not find container \"4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72\": container with ID starting with 4229cf2b01f0acd0097471247c9b48830f9d96c53dc1622ec254b55947c5ea72 not found: ID does not exist" Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.088327 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-pj2q6"] Dec 04 09:52:27 crc kubenswrapper[4707]: I1204 09:52:27.092217 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-pj2q6"] Dec 04 09:52:28 crc kubenswrapper[4707]: I1204 09:52:28.852550 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b873bc48-23e0-43b9-a348-28ff090b9d90" path="/var/lib/kubelet/pods/b873bc48-23e0-43b9-a348-28ff090b9d90/volumes" Dec 04 09:52:30 crc kubenswrapper[4707]: I1204 09:52:30.816977 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:52:30 crc kubenswrapper[4707]: I1204 09:52:30.817306 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:52:30 crc kubenswrapper[4707]: I1204 09:52:30.817408 4707 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:52:30 crc kubenswrapper[4707]: 
I1204 09:52:30.817977 4707 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"38c08b074cc460fc3513402fc630433eabee9cc90aab57117db09d7ee10fc03a"} pod="openshift-machine-config-operator/machine-config-daemon-c244z" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 09:52:30 crc kubenswrapper[4707]: I1204 09:52:30.818032 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" containerID="cri-o://38c08b074cc460fc3513402fc630433eabee9cc90aab57117db09d7ee10fc03a" gracePeriod=600 Dec 04 09:52:32 crc kubenswrapper[4707]: I1204 09:52:32.099899 4707 generic.go:334] "Generic (PLEG): container finished" podID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerID="38c08b074cc460fc3513402fc630433eabee9cc90aab57117db09d7ee10fc03a" exitCode=0 Dec 04 09:52:32 crc kubenswrapper[4707]: I1204 09:52:32.099966 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerDied","Data":"38c08b074cc460fc3513402fc630433eabee9cc90aab57117db09d7ee10fc03a"} Dec 04 09:52:32 crc kubenswrapper[4707]: I1204 09:52:32.100301 4707 scope.go:117] "RemoveContainer" containerID="59933ba8a620838bec3b3d9fcf426225c5ef7bc5c9d364798d360eff4e7c02b9" Dec 04 09:52:32 crc kubenswrapper[4707]: I1204 09:52:32.843533 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-r9wp9" Dec 04 09:52:33 crc kubenswrapper[4707]: I1204 09:52:33.108667 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"7beea26bd12c9b8a3dede9145f5d95cbd909ef83792c0ec9ae43b628c20e8918"} Dec 04 09:52:35 crc kubenswrapper[4707]: I1204 09:52:35.880813 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-rtkfp" Dec 04 09:52:35 crc kubenswrapper[4707]: I1204 09:52:35.881502 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-rtkfp" Dec 04 09:52:35 crc kubenswrapper[4707]: I1204 09:52:35.908287 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-rtkfp" Dec 04 09:52:36 crc kubenswrapper[4707]: I1204 09:52:36.155308 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-rtkfp" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.748295 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz"] Dec 04 09:52:37 crc kubenswrapper[4707]: E1204 09:52:37.748798 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b873bc48-23e0-43b9-a348-28ff090b9d90" containerName="registry-server" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.748811 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="b873bc48-23e0-43b9-a348-28ff090b9d90" containerName="registry-server" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.748941 4707 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="b873bc48-23e0-43b9-a348-28ff090b9d90" containerName="registry-server" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.749824 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.753421 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-zklcg" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.765814 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz"] Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.878120 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-bundle\") pod \"55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.878203 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-util\") pod \"55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.878241 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72442\" (UniqueName: \"kubernetes.io/projected/934d114a-0b4c-4bfe-aca3-5518bf105171-kube-api-access-72442\") pod \"55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.979399 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-util\") pod \"55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.979460 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72442\" (UniqueName: \"kubernetes.io/projected/934d114a-0b4c-4bfe-aca3-5518bf105171-kube-api-access-72442\") pod \"55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.979530 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-bundle\") pod \"55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:37 crc 
kubenswrapper[4707]: I1204 09:52:37.980093 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-util\") pod \"55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.980182 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-bundle\") pod \"55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:37 crc kubenswrapper[4707]: I1204 09:52:37.997268 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72442\" (UniqueName: \"kubernetes.io/projected/934d114a-0b4c-4bfe-aca3-5518bf105171-kube-api-access-72442\") pod \"55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:38 crc kubenswrapper[4707]: I1204 09:52:38.067203 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:38 crc kubenswrapper[4707]: I1204 09:52:38.270377 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz"] Dec 04 09:52:39 crc kubenswrapper[4707]: I1204 09:52:39.144668 4707 generic.go:334] "Generic (PLEG): container finished" podID="934d114a-0b4c-4bfe-aca3-5518bf105171" containerID="fc6facd73b61b7e1a21d05c1bb0b4b5444785f59b0ddcde92bba3041bf76de0f" exitCode=0 Dec 04 09:52:39 crc kubenswrapper[4707]: I1204 09:52:39.144838 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" event={"ID":"934d114a-0b4c-4bfe-aca3-5518bf105171","Type":"ContainerDied","Data":"fc6facd73b61b7e1a21d05c1bb0b4b5444785f59b0ddcde92bba3041bf76de0f"} Dec 04 09:52:39 crc kubenswrapper[4707]: I1204 09:52:39.144995 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" event={"ID":"934d114a-0b4c-4bfe-aca3-5518bf105171","Type":"ContainerStarted","Data":"5244253c06dae75748870904550c287ac787a6ee3b0dec523403076e6bdd3127"} Dec 04 09:52:41 crc kubenswrapper[4707]: I1204 09:52:41.158497 4707 generic.go:334] "Generic (PLEG): container finished" podID="934d114a-0b4c-4bfe-aca3-5518bf105171" containerID="a8835b4f5add4ca685fde584dac75622ac4adef9095cc7a0118176dcc0878afe" exitCode=0 Dec 04 09:52:41 crc kubenswrapper[4707]: I1204 09:52:41.158569 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" event={"ID":"934d114a-0b4c-4bfe-aca3-5518bf105171","Type":"ContainerDied","Data":"a8835b4f5add4ca685fde584dac75622ac4adef9095cc7a0118176dcc0878afe"} Dec 04 09:52:42 crc kubenswrapper[4707]: I1204 09:52:42.166390 4707 generic.go:334] "Generic (PLEG): container finished" podID="934d114a-0b4c-4bfe-aca3-5518bf105171" 
containerID="63fae04690d14548a142422581105f91635478ef809a563ca303196093ec0efb" exitCode=0 Dec 04 09:52:42 crc kubenswrapper[4707]: I1204 09:52:42.166592 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" event={"ID":"934d114a-0b4c-4bfe-aca3-5518bf105171","Type":"ContainerDied","Data":"63fae04690d14548a142422581105f91635478ef809a563ca303196093ec0efb"} Dec 04 09:52:43 crc kubenswrapper[4707]: I1204 09:52:43.462804 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:43 crc kubenswrapper[4707]: I1204 09:52:43.626021 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-util\") pod \"934d114a-0b4c-4bfe-aca3-5518bf105171\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " Dec 04 09:52:43 crc kubenswrapper[4707]: I1204 09:52:43.626416 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-bundle\") pod \"934d114a-0b4c-4bfe-aca3-5518bf105171\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " Dec 04 09:52:43 crc kubenswrapper[4707]: I1204 09:52:43.626621 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72442\" (UniqueName: \"kubernetes.io/projected/934d114a-0b4c-4bfe-aca3-5518bf105171-kube-api-access-72442\") pod \"934d114a-0b4c-4bfe-aca3-5518bf105171\" (UID: \"934d114a-0b4c-4bfe-aca3-5518bf105171\") " Dec 04 09:52:43 crc kubenswrapper[4707]: I1204 09:52:43.627277 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-bundle" (OuterVolumeSpecName: "bundle") pod "934d114a-0b4c-4bfe-aca3-5518bf105171" (UID: "934d114a-0b4c-4bfe-aca3-5518bf105171"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:52:43 crc kubenswrapper[4707]: I1204 09:52:43.631455 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/934d114a-0b4c-4bfe-aca3-5518bf105171-kube-api-access-72442" (OuterVolumeSpecName: "kube-api-access-72442") pod "934d114a-0b4c-4bfe-aca3-5518bf105171" (UID: "934d114a-0b4c-4bfe-aca3-5518bf105171"). InnerVolumeSpecName "kube-api-access-72442". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:52:43 crc kubenswrapper[4707]: I1204 09:52:43.641062 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-util" (OuterVolumeSpecName: "util") pod "934d114a-0b4c-4bfe-aca3-5518bf105171" (UID: "934d114a-0b4c-4bfe-aca3-5518bf105171"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:52:43 crc kubenswrapper[4707]: I1204 09:52:43.728067 4707 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:52:43 crc kubenswrapper[4707]: I1204 09:52:43.728099 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72442\" (UniqueName: \"kubernetes.io/projected/934d114a-0b4c-4bfe-aca3-5518bf105171-kube-api-access-72442\") on node \"crc\" DevicePath \"\"" Dec 04 09:52:43 crc kubenswrapper[4707]: I1204 09:52:43.728113 4707 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/934d114a-0b4c-4bfe-aca3-5518bf105171-util\") on node \"crc\" DevicePath \"\"" Dec 04 09:52:44 crc kubenswrapper[4707]: I1204 09:52:44.233928 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" event={"ID":"934d114a-0b4c-4bfe-aca3-5518bf105171","Type":"ContainerDied","Data":"5244253c06dae75748870904550c287ac787a6ee3b0dec523403076e6bdd3127"} Dec 04 09:52:44 crc kubenswrapper[4707]: I1204 09:52:44.233977 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5244253c06dae75748870904550c287ac787a6ee3b0dec523403076e6bdd3127" Dec 04 09:52:44 crc kubenswrapper[4707]: I1204 09:52:44.233973 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz" Dec 04 09:52:50 crc kubenswrapper[4707]: I1204 09:52:50.867037 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l"] Dec 04 09:52:50 crc kubenswrapper[4707]: E1204 09:52:50.872744 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="934d114a-0b4c-4bfe-aca3-5518bf105171" containerName="extract" Dec 04 09:52:50 crc kubenswrapper[4707]: I1204 09:52:50.872766 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="934d114a-0b4c-4bfe-aca3-5518bf105171" containerName="extract" Dec 04 09:52:50 crc kubenswrapper[4707]: E1204 09:52:50.872779 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="934d114a-0b4c-4bfe-aca3-5518bf105171" containerName="util" Dec 04 09:52:50 crc kubenswrapper[4707]: I1204 09:52:50.872785 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="934d114a-0b4c-4bfe-aca3-5518bf105171" containerName="util" Dec 04 09:52:50 crc kubenswrapper[4707]: E1204 09:52:50.872807 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="934d114a-0b4c-4bfe-aca3-5518bf105171" containerName="pull" Dec 04 09:52:50 crc kubenswrapper[4707]: I1204 09:52:50.872813 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="934d114a-0b4c-4bfe-aca3-5518bf105171" containerName="pull" Dec 04 09:52:50 crc kubenswrapper[4707]: I1204 09:52:50.872972 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="934d114a-0b4c-4bfe-aca3-5518bf105171" containerName="extract" Dec 04 09:52:50 crc kubenswrapper[4707]: I1204 09:52:50.873414 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:50 crc kubenswrapper[4707]: I1204 09:52:50.880738 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 04 09:52:50 crc kubenswrapper[4707]: I1204 09:52:50.880752 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Dec 04 09:52:50 crc kubenswrapper[4707]: I1204 09:52:50.880918 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-745x2" Dec 04 09:52:50 crc kubenswrapper[4707]: I1204 09:52:50.901048 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l"] Dec 04 09:52:51 crc kubenswrapper[4707]: I1204 09:52:51.046169 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-webhook-cert\") pod \"mariadb-operator-controller-manager-5c4ddb68d5-kpz9l\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") " pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:51 crc kubenswrapper[4707]: I1204 09:52:51.046230 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7rm4\" (UniqueName: \"kubernetes.io/projected/ee15d19c-9f87-47f6-b686-76e39c0477e9-kube-api-access-q7rm4\") pod \"mariadb-operator-controller-manager-5c4ddb68d5-kpz9l\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") " pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:51 crc kubenswrapper[4707]: I1204 09:52:51.046268 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-apiservice-cert\") pod \"mariadb-operator-controller-manager-5c4ddb68d5-kpz9l\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") " pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:51 crc kubenswrapper[4707]: I1204 09:52:51.147552 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-webhook-cert\") pod \"mariadb-operator-controller-manager-5c4ddb68d5-kpz9l\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") " pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:51 crc kubenswrapper[4707]: I1204 09:52:51.147622 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7rm4\" (UniqueName: \"kubernetes.io/projected/ee15d19c-9f87-47f6-b686-76e39c0477e9-kube-api-access-q7rm4\") pod \"mariadb-operator-controller-manager-5c4ddb68d5-kpz9l\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") " pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:51 crc kubenswrapper[4707]: I1204 09:52:51.147654 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-apiservice-cert\") pod \"mariadb-operator-controller-manager-5c4ddb68d5-kpz9l\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") 
" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:51 crc kubenswrapper[4707]: I1204 09:52:51.156222 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-webhook-cert\") pod \"mariadb-operator-controller-manager-5c4ddb68d5-kpz9l\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") " pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:51 crc kubenswrapper[4707]: I1204 09:52:51.161614 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-apiservice-cert\") pod \"mariadb-operator-controller-manager-5c4ddb68d5-kpz9l\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") " pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:51 crc kubenswrapper[4707]: I1204 09:52:51.163733 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7rm4\" (UniqueName: \"kubernetes.io/projected/ee15d19c-9f87-47f6-b686-76e39c0477e9-kube-api-access-q7rm4\") pod \"mariadb-operator-controller-manager-5c4ddb68d5-kpz9l\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") " pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:51 crc kubenswrapper[4707]: I1204 09:52:51.198539 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:51 crc kubenswrapper[4707]: I1204 09:52:51.496752 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l"] Dec 04 09:52:52 crc kubenswrapper[4707]: I1204 09:52:52.277739 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" event={"ID":"ee15d19c-9f87-47f6-b686-76e39c0477e9","Type":"ContainerStarted","Data":"cdb02daaefb2a4383aae865758c2cebd7eeac72eccee9fabfc63289744d4361a"} Dec 04 09:52:56 crc kubenswrapper[4707]: I1204 09:52:56.303114 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" event={"ID":"ee15d19c-9f87-47f6-b686-76e39c0477e9","Type":"ContainerStarted","Data":"df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4"} Dec 04 09:52:56 crc kubenswrapper[4707]: I1204 09:52:56.303668 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:52:56 crc kubenswrapper[4707]: I1204 09:52:56.322395 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" podStartSLOduration=1.891752374 podStartE2EDuration="6.322379137s" podCreationTimestamp="2025-12-04 09:52:50 +0000 UTC" firstStartedPulling="2025-12-04 09:52:51.50392424 +0000 UTC m=+870.939746747" lastFinishedPulling="2025-12-04 09:52:55.934551003 +0000 UTC m=+875.370373510" observedRunningTime="2025-12-04 09:52:56.319381962 +0000 UTC m=+875.755204469" watchObservedRunningTime="2025-12-04 09:52:56.322379137 +0000 UTC m=+875.758201644" Dec 04 09:53:01 crc kubenswrapper[4707]: I1204 09:53:01.204067 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 09:53:05 crc kubenswrapper[4707]: I1204 09:53:05.444831 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-nd5wt"] Dec 04 09:53:05 crc kubenswrapper[4707]: I1204 09:53:05.445889 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-nd5wt" Dec 04 09:53:05 crc kubenswrapper[4707]: I1204 09:53:05.448153 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-s5khl" Dec 04 09:53:05 crc kubenswrapper[4707]: I1204 09:53:05.452753 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-nd5wt"] Dec 04 09:53:05 crc kubenswrapper[4707]: I1204 09:53:05.582517 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28w9c\" (UniqueName: \"kubernetes.io/projected/fca8013f-27a6-4450-93d8-8a5949d66b59-kube-api-access-28w9c\") pod \"infra-operator-index-nd5wt\" (UID: \"fca8013f-27a6-4450-93d8-8a5949d66b59\") " pod="openstack-operators/infra-operator-index-nd5wt" Dec 04 09:53:05 crc kubenswrapper[4707]: I1204 09:53:05.683710 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28w9c\" (UniqueName: \"kubernetes.io/projected/fca8013f-27a6-4450-93d8-8a5949d66b59-kube-api-access-28w9c\") pod \"infra-operator-index-nd5wt\" (UID: \"fca8013f-27a6-4450-93d8-8a5949d66b59\") " pod="openstack-operators/infra-operator-index-nd5wt" Dec 04 09:53:05 crc kubenswrapper[4707]: I1204 09:53:05.719271 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28w9c\" (UniqueName: \"kubernetes.io/projected/fca8013f-27a6-4450-93d8-8a5949d66b59-kube-api-access-28w9c\") pod \"infra-operator-index-nd5wt\" (UID: \"fca8013f-27a6-4450-93d8-8a5949d66b59\") " pod="openstack-operators/infra-operator-index-nd5wt" Dec 04 09:53:05 crc kubenswrapper[4707]: I1204 09:53:05.779824 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-nd5wt" Dec 04 09:53:06 crc kubenswrapper[4707]: W1204 09:53:06.271046 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfca8013f_27a6_4450_93d8_8a5949d66b59.slice/crio-004c46050ce40c6ea74adc25c8f8f1958e656deb918cfb3d36c99d33bd647a65 WatchSource:0}: Error finding container 004c46050ce40c6ea74adc25c8f8f1958e656deb918cfb3d36c99d33bd647a65: Status 404 returned error can't find the container with id 004c46050ce40c6ea74adc25c8f8f1958e656deb918cfb3d36c99d33bd647a65 Dec 04 09:53:06 crc kubenswrapper[4707]: I1204 09:53:06.272545 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-nd5wt"] Dec 04 09:53:06 crc kubenswrapper[4707]: I1204 09:53:06.489230 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-nd5wt" event={"ID":"fca8013f-27a6-4450-93d8-8a5949d66b59","Type":"ContainerStarted","Data":"004c46050ce40c6ea74adc25c8f8f1958e656deb918cfb3d36c99d33bd647a65"} Dec 04 09:53:22 crc kubenswrapper[4707]: I1204 09:53:22.822422 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-nd5wt" event={"ID":"fca8013f-27a6-4450-93d8-8a5949d66b59","Type":"ContainerStarted","Data":"c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2"} Dec 04 09:53:22 crc kubenswrapper[4707]: I1204 09:53:22.838626 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-nd5wt" podStartSLOduration=2.397769452 podStartE2EDuration="17.838612217s" podCreationTimestamp="2025-12-04 09:53:05 +0000 UTC" firstStartedPulling="2025-12-04 09:53:06.280111038 +0000 UTC m=+885.715933545" lastFinishedPulling="2025-12-04 09:53:21.720953813 +0000 UTC m=+901.156776310" observedRunningTime="2025-12-04 09:53:22.834198528 +0000 UTC m=+902.270021055" watchObservedRunningTime="2025-12-04 09:53:22.838612217 +0000 UTC m=+902.274434724" Dec 04 09:53:25 crc kubenswrapper[4707]: I1204 09:53:25.780523 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-nd5wt" Dec 04 09:53:25 crc kubenswrapper[4707]: I1204 09:53:25.780608 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-nd5wt" Dec 04 09:53:25 crc kubenswrapper[4707]: I1204 09:53:25.812327 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-nd5wt" Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.631984 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4rflr"] Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.633842 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.641360 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4rflr"] Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.690700 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-catalog-content\") pod \"certified-operators-4rflr\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.690744 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-utilities\") pod \"certified-operators-4rflr\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.690792 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqv46\" (UniqueName: \"kubernetes.io/projected/4564df65-15e6-48e6-9e52-f6423f696a9c-kube-api-access-zqv46\") pod \"certified-operators-4rflr\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.792325 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-catalog-content\") pod \"certified-operators-4rflr\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.792388 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-utilities\") pod \"certified-operators-4rflr\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.792419 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqv46\" (UniqueName: \"kubernetes.io/projected/4564df65-15e6-48e6-9e52-f6423f696a9c-kube-api-access-zqv46\") pod \"certified-operators-4rflr\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.793030 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-utilities\") pod \"certified-operators-4rflr\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.793294 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-catalog-content\") pod \"certified-operators-4rflr\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.812907 4707 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zqv46\" (UniqueName: \"kubernetes.io/projected/4564df65-15e6-48e6-9e52-f6423f696a9c-kube-api-access-zqv46\") pod \"certified-operators-4rflr\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:31 crc kubenswrapper[4707]: I1204 09:53:31.960326 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:32 crc kubenswrapper[4707]: I1204 09:53:32.376455 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4rflr"] Dec 04 09:53:32 crc kubenswrapper[4707]: I1204 09:53:32.881897 4707 generic.go:334] "Generic (PLEG): container finished" podID="4564df65-15e6-48e6-9e52-f6423f696a9c" containerID="fdf455f9eb2469c731f5a7cac4bc1e2b57c928cf8c13d126c4ad585354d6efb5" exitCode=0 Dec 04 09:53:32 crc kubenswrapper[4707]: I1204 09:53:32.881996 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rflr" event={"ID":"4564df65-15e6-48e6-9e52-f6423f696a9c","Type":"ContainerDied","Data":"fdf455f9eb2469c731f5a7cac4bc1e2b57c928cf8c13d126c4ad585354d6efb5"} Dec 04 09:53:32 crc kubenswrapper[4707]: I1204 09:53:32.882514 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rflr" event={"ID":"4564df65-15e6-48e6-9e52-f6423f696a9c","Type":"ContainerStarted","Data":"01726dc60fe1fd2d167d8939aeb31a6b92091de93054827664ea2a052904e124"} Dec 04 09:53:34 crc kubenswrapper[4707]: I1204 09:53:34.896572 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rflr" event={"ID":"4564df65-15e6-48e6-9e52-f6423f696a9c","Type":"ContainerStarted","Data":"cddbfadef4f54ca218e6e5ae2bb312901e20bb738b76c49e602e0dac15deed28"} Dec 04 09:53:35 crc kubenswrapper[4707]: I1204 09:53:35.812237 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-nd5wt" Dec 04 09:53:35 crc kubenswrapper[4707]: I1204 09:53:35.903961 4707 generic.go:334] "Generic (PLEG): container finished" podID="4564df65-15e6-48e6-9e52-f6423f696a9c" containerID="cddbfadef4f54ca218e6e5ae2bb312901e20bb738b76c49e602e0dac15deed28" exitCode=0 Dec 04 09:53:35 crc kubenswrapper[4707]: I1204 09:53:35.904024 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rflr" event={"ID":"4564df65-15e6-48e6-9e52-f6423f696a9c","Type":"ContainerDied","Data":"cddbfadef4f54ca218e6e5ae2bb312901e20bb738b76c49e602e0dac15deed28"} Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.633155 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ktmqc"] Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.634494 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.643869 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ktmqc"] Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.651002 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-catalog-content\") pod \"redhat-marketplace-ktmqc\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.651057 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-utilities\") pod \"redhat-marketplace-ktmqc\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.651085 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5mmr\" (UniqueName: \"kubernetes.io/projected/fe797225-935d-4660-8763-8877a6eb5563-kube-api-access-n5mmr\") pod \"redhat-marketplace-ktmqc\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.751695 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-catalog-content\") pod \"redhat-marketplace-ktmqc\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.751820 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-utilities\") pod \"redhat-marketplace-ktmqc\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.751880 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5mmr\" (UniqueName: \"kubernetes.io/projected/fe797225-935d-4660-8763-8877a6eb5563-kube-api-access-n5mmr\") pod \"redhat-marketplace-ktmqc\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.752589 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-catalog-content\") pod \"redhat-marketplace-ktmqc\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.752637 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-utilities\") pod \"redhat-marketplace-ktmqc\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.776249 4707 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-n5mmr\" (UniqueName: \"kubernetes.io/projected/fe797225-935d-4660-8763-8877a6eb5563-kube-api-access-n5mmr\") pod \"redhat-marketplace-ktmqc\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:36 crc kubenswrapper[4707]: I1204 09:53:36.948015 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:37 crc kubenswrapper[4707]: I1204 09:53:37.964955 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ktmqc"] Dec 04 09:53:37 crc kubenswrapper[4707]: W1204 09:53:37.967723 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe797225_935d_4660_8763_8877a6eb5563.slice/crio-cf5241d40847b048f1513f1a5327d9304c54c94603e17b57bfdad366af619573 WatchSource:0}: Error finding container cf5241d40847b048f1513f1a5327d9304c54c94603e17b57bfdad366af619573: Status 404 returned error can't find the container with id cf5241d40847b048f1513f1a5327d9304c54c94603e17b57bfdad366af619573 Dec 04 09:53:38 crc kubenswrapper[4707]: I1204 09:53:38.924565 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rflr" event={"ID":"4564df65-15e6-48e6-9e52-f6423f696a9c","Type":"ContainerStarted","Data":"b3069583013e44167517d1e1d075bab772302f73abbd36f2c9f656b1bea27cbd"} Dec 04 09:53:38 crc kubenswrapper[4707]: I1204 09:53:38.926307 4707 generic.go:334] "Generic (PLEG): container finished" podID="fe797225-935d-4660-8763-8877a6eb5563" containerID="abfb704be3fa7d74903c10f369d36f704790719b716dc877080357c83695010c" exitCode=0 Dec 04 09:53:38 crc kubenswrapper[4707]: I1204 09:53:38.926362 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktmqc" event={"ID":"fe797225-935d-4660-8763-8877a6eb5563","Type":"ContainerDied","Data":"abfb704be3fa7d74903c10f369d36f704790719b716dc877080357c83695010c"} Dec 04 09:53:38 crc kubenswrapper[4707]: I1204 09:53:38.926389 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktmqc" event={"ID":"fe797225-935d-4660-8763-8877a6eb5563","Type":"ContainerStarted","Data":"cf5241d40847b048f1513f1a5327d9304c54c94603e17b57bfdad366af619573"} Dec 04 09:53:38 crc kubenswrapper[4707]: I1204 09:53:38.942618 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4rflr" podStartSLOduration=3.398543113 podStartE2EDuration="7.942595736s" podCreationTimestamp="2025-12-04 09:53:31 +0000 UTC" firstStartedPulling="2025-12-04 09:53:32.883852402 +0000 UTC m=+912.319674899" lastFinishedPulling="2025-12-04 09:53:37.427905015 +0000 UTC m=+916.863727522" observedRunningTime="2025-12-04 09:53:38.940019294 +0000 UTC m=+918.375841811" watchObservedRunningTime="2025-12-04 09:53:38.942595736 +0000 UTC m=+918.378418243" Dec 04 09:53:39 crc kubenswrapper[4707]: I1204 09:53:39.935399 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktmqc" event={"ID":"fe797225-935d-4660-8763-8877a6eb5563","Type":"ContainerStarted","Data":"7b85f30fd8d660c537f0cfff7354440d2b092604b922e825f85812d93e167a87"} Dec 04 09:53:40 crc kubenswrapper[4707]: I1204 09:53:40.941660 4707 generic.go:334] "Generic (PLEG): container finished" podID="fe797225-935d-4660-8763-8877a6eb5563" 
containerID="7b85f30fd8d660c537f0cfff7354440d2b092604b922e825f85812d93e167a87" exitCode=0 Dec 04 09:53:40 crc kubenswrapper[4707]: I1204 09:53:40.941736 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktmqc" event={"ID":"fe797225-935d-4660-8763-8877a6eb5563","Type":"ContainerDied","Data":"7b85f30fd8d660c537f0cfff7354440d2b092604b922e825f85812d93e167a87"} Dec 04 09:53:41 crc kubenswrapper[4707]: I1204 09:53:41.960982 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:41 crc kubenswrapper[4707]: I1204 09:53:41.961418 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:42 crc kubenswrapper[4707]: I1204 09:53:42.004317 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:42 crc kubenswrapper[4707]: I1204 09:53:42.999554 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:43 crc kubenswrapper[4707]: I1204 09:53:43.422274 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4rflr"] Dec 04 09:53:44 crc kubenswrapper[4707]: I1204 09:53:44.967104 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4rflr" podUID="4564df65-15e6-48e6-9e52-f6423f696a9c" containerName="registry-server" containerID="cri-o://b3069583013e44167517d1e1d075bab772302f73abbd36f2c9f656b1bea27cbd" gracePeriod=2 Dec 04 09:53:45 crc kubenswrapper[4707]: I1204 09:53:45.876001 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58"] Dec 04 09:53:45 crc kubenswrapper[4707]: I1204 09:53:45.877146 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:45 crc kubenswrapper[4707]: I1204 09:53:45.878990 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-zklcg" Dec 04 09:53:45 crc kubenswrapper[4707]: I1204 09:53:45.889040 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58"] Dec 04 09:53:45 crc kubenswrapper[4707]: I1204 09:53:45.978788 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkpwb\" (UniqueName: \"kubernetes.io/projected/727d9472-f299-41de-83d8-3d3d73c669e2-kube-api-access-nkpwb\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:45 crc kubenswrapper[4707]: I1204 09:53:45.979141 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-bundle\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:45 crc kubenswrapper[4707]: I1204 09:53:45.979183 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-util\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.080622 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkpwb\" (UniqueName: \"kubernetes.io/projected/727d9472-f299-41de-83d8-3d3d73c669e2-kube-api-access-nkpwb\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.080715 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-bundle\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.080777 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-util\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.081264 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-util\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.081405 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-bundle\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.100306 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkpwb\" (UniqueName: \"kubernetes.io/projected/727d9472-f299-41de-83d8-3d3d73c669e2-kube-api-access-nkpwb\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.195442 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.402625 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58"] Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.982970 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" event={"ID":"727d9472-f299-41de-83d8-3d3d73c669e2","Type":"ContainerStarted","Data":"e02e254a788bc6a4e8e7c21dba78c9d4c0af4187f53ff757525a41306aa5d843"} Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.983475 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" event={"ID":"727d9472-f299-41de-83d8-3d3d73c669e2","Type":"ContainerStarted","Data":"7b3a943b1d9b8edf024cedf53bcd9562cda6210578a2bc92f9fce89a9af7bfbc"} Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.984947 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktmqc" event={"ID":"fe797225-935d-4660-8763-8877a6eb5563","Type":"ContainerStarted","Data":"e3afce6b989925fec3a3fed7688293ca6fc95f3044b6f682d82c5bdb743506e4"} Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.988734 4707 generic.go:334] "Generic (PLEG): container finished" podID="4564df65-15e6-48e6-9e52-f6423f696a9c" containerID="b3069583013e44167517d1e1d075bab772302f73abbd36f2c9f656b1bea27cbd" exitCode=0 Dec 04 09:53:46 crc kubenswrapper[4707]: I1204 09:53:46.988770 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rflr" event={"ID":"4564df65-15e6-48e6-9e52-f6423f696a9c","Type":"ContainerDied","Data":"b3069583013e44167517d1e1d075bab772302f73abbd36f2c9f656b1bea27cbd"} Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.029833 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ktmqc" podStartSLOduration=3.441890735 podStartE2EDuration="11.029792149s" podCreationTimestamp="2025-12-04 
09:53:36 +0000 UTC" firstStartedPulling="2025-12-04 09:53:38.928311463 +0000 UTC m=+918.364133970" lastFinishedPulling="2025-12-04 09:53:46.516212877 +0000 UTC m=+925.952035384" observedRunningTime="2025-12-04 09:53:47.025797843 +0000 UTC m=+926.461620360" watchObservedRunningTime="2025-12-04 09:53:47.029792149 +0000 UTC m=+926.465614656" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.213801 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.412659 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-catalog-content\") pod \"4564df65-15e6-48e6-9e52-f6423f696a9c\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.412711 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqv46\" (UniqueName: \"kubernetes.io/projected/4564df65-15e6-48e6-9e52-f6423f696a9c-kube-api-access-zqv46\") pod \"4564df65-15e6-48e6-9e52-f6423f696a9c\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.412825 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-utilities\") pod \"4564df65-15e6-48e6-9e52-f6423f696a9c\" (UID: \"4564df65-15e6-48e6-9e52-f6423f696a9c\") " Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.413873 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-utilities" (OuterVolumeSpecName: "utilities") pod "4564df65-15e6-48e6-9e52-f6423f696a9c" (UID: "4564df65-15e6-48e6-9e52-f6423f696a9c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.423831 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4564df65-15e6-48e6-9e52-f6423f696a9c-kube-api-access-zqv46" (OuterVolumeSpecName: "kube-api-access-zqv46") pod "4564df65-15e6-48e6-9e52-f6423f696a9c" (UID: "4564df65-15e6-48e6-9e52-f6423f696a9c"). InnerVolumeSpecName "kube-api-access-zqv46". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.470040 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4564df65-15e6-48e6-9e52-f6423f696a9c" (UID: "4564df65-15e6-48e6-9e52-f6423f696a9c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.514145 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.514182 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4564df65-15e6-48e6-9e52-f6423f696a9c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.514195 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqv46\" (UniqueName: \"kubernetes.io/projected/4564df65-15e6-48e6-9e52-f6423f696a9c-kube-api-access-zqv46\") on node \"crc\" DevicePath \"\"" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.831310 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dmv5j"] Dec 04 09:53:47 crc kubenswrapper[4707]: E1204 09:53:47.831595 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4564df65-15e6-48e6-9e52-f6423f696a9c" containerName="extract-content" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.831617 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4564df65-15e6-48e6-9e52-f6423f696a9c" containerName="extract-content" Dec 04 09:53:47 crc kubenswrapper[4707]: E1204 09:53:47.831629 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4564df65-15e6-48e6-9e52-f6423f696a9c" containerName="extract-utilities" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.831640 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4564df65-15e6-48e6-9e52-f6423f696a9c" containerName="extract-utilities" Dec 04 09:53:47 crc kubenswrapper[4707]: E1204 09:53:47.831664 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4564df65-15e6-48e6-9e52-f6423f696a9c" containerName="registry-server" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.831670 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4564df65-15e6-48e6-9e52-f6423f696a9c" containerName="registry-server" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.831777 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="4564df65-15e6-48e6-9e52-f6423f696a9c" containerName="registry-server" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.832762 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.846150 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dmv5j"] Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.995623 4707 generic.go:334] "Generic (PLEG): container finished" podID="727d9472-f299-41de-83d8-3d3d73c669e2" containerID="e02e254a788bc6a4e8e7c21dba78c9d4c0af4187f53ff757525a41306aa5d843" exitCode=0 Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.995689 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" event={"ID":"727d9472-f299-41de-83d8-3d3d73c669e2","Type":"ContainerDied","Data":"e02e254a788bc6a4e8e7c21dba78c9d4c0af4187f53ff757525a41306aa5d843"} Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.999120 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4rflr" Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.999163 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rflr" event={"ID":"4564df65-15e6-48e6-9e52-f6423f696a9c","Type":"ContainerDied","Data":"01726dc60fe1fd2d167d8939aeb31a6b92091de93054827664ea2a052904e124"} Dec 04 09:53:47 crc kubenswrapper[4707]: I1204 09:53:47.999195 4707 scope.go:117] "RemoveContainer" containerID="b3069583013e44167517d1e1d075bab772302f73abbd36f2c9f656b1bea27cbd" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.022278 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-catalog-content\") pod \"community-operators-dmv5j\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.022416 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvb8f\" (UniqueName: \"kubernetes.io/projected/8fba6010-a6b6-440f-a293-a9d7a3c85dab-kube-api-access-jvb8f\") pod \"community-operators-dmv5j\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.022518 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-utilities\") pod \"community-operators-dmv5j\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.025217 4707 scope.go:117] "RemoveContainer" containerID="cddbfadef4f54ca218e6e5ae2bb312901e20bb738b76c49e602e0dac15deed28" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.042535 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4rflr"] Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.051863 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4rflr"] Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.053098 4707 scope.go:117] "RemoveContainer" containerID="fdf455f9eb2469c731f5a7cac4bc1e2b57c928cf8c13d126c4ad585354d6efb5" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.123577 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-catalog-content\") pod \"community-operators-dmv5j\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.123631 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvb8f\" (UniqueName: \"kubernetes.io/projected/8fba6010-a6b6-440f-a293-a9d7a3c85dab-kube-api-access-jvb8f\") pod \"community-operators-dmv5j\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.123713 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-utilities\") pod \"community-operators-dmv5j\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.124148 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-utilities\") pod \"community-operators-dmv5j\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.124363 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-catalog-content\") pod \"community-operators-dmv5j\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.146570 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvb8f\" (UniqueName: \"kubernetes.io/projected/8fba6010-a6b6-440f-a293-a9d7a3c85dab-kube-api-access-jvb8f\") pod \"community-operators-dmv5j\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.150165 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.419716 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dmv5j"] Dec 04 09:53:48 crc kubenswrapper[4707]: W1204 09:53:48.427420 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8fba6010_a6b6_440f_a293_a9d7a3c85dab.slice/crio-0bede0959a7853266e881badfe5c1c33cfa8a3f33a0fdede04075b9d55f8f513 WatchSource:0}: Error finding container 0bede0959a7853266e881badfe5c1c33cfa8a3f33a0fdede04075b9d55f8f513: Status 404 returned error can't find the container with id 0bede0959a7853266e881badfe5c1c33cfa8a3f33a0fdede04075b9d55f8f513 Dec 04 09:53:48 crc kubenswrapper[4707]: I1204 09:53:48.852116 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4564df65-15e6-48e6-9e52-f6423f696a9c" path="/var/lib/kubelet/pods/4564df65-15e6-48e6-9e52-f6423f696a9c/volumes" Dec 04 09:53:49 crc kubenswrapper[4707]: I1204 09:53:49.009140 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" event={"ID":"727d9472-f299-41de-83d8-3d3d73c669e2","Type":"ContainerStarted","Data":"227627d594ad40441a26d1664b3bef384e48816c51cb19f672f02426568bb9aa"} Dec 04 09:53:49 crc kubenswrapper[4707]: I1204 09:53:49.011284 4707 generic.go:334] "Generic (PLEG): container finished" podID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerID="1bb56015c247cd94bb664ff903a14b9aeae43f1221b43ce0120bcc91841f196b" exitCode=0 Dec 04 09:53:49 crc kubenswrapper[4707]: I1204 09:53:49.011499 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmv5j" event={"ID":"8fba6010-a6b6-440f-a293-a9d7a3c85dab","Type":"ContainerDied","Data":"1bb56015c247cd94bb664ff903a14b9aeae43f1221b43ce0120bcc91841f196b"} Dec 04 09:53:49 crc kubenswrapper[4707]: I1204 09:53:49.011547 4707 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmv5j" event={"ID":"8fba6010-a6b6-440f-a293-a9d7a3c85dab","Type":"ContainerStarted","Data":"0bede0959a7853266e881badfe5c1c33cfa8a3f33a0fdede04075b9d55f8f513"} Dec 04 09:53:50 crc kubenswrapper[4707]: I1204 09:53:50.021555 4707 generic.go:334] "Generic (PLEG): container finished" podID="727d9472-f299-41de-83d8-3d3d73c669e2" containerID="227627d594ad40441a26d1664b3bef384e48816c51cb19f672f02426568bb9aa" exitCode=0 Dec 04 09:53:50 crc kubenswrapper[4707]: I1204 09:53:50.021906 4707 generic.go:334] "Generic (PLEG): container finished" podID="727d9472-f299-41de-83d8-3d3d73c669e2" containerID="eb308450cd911f4b7b823794d6516a3f6e2c12535207e9f3d7d1f11d0fbe407f" exitCode=0 Dec 04 09:53:50 crc kubenswrapper[4707]: I1204 09:53:50.021996 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" event={"ID":"727d9472-f299-41de-83d8-3d3d73c669e2","Type":"ContainerDied","Data":"227627d594ad40441a26d1664b3bef384e48816c51cb19f672f02426568bb9aa"} Dec 04 09:53:50 crc kubenswrapper[4707]: I1204 09:53:50.022027 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" event={"ID":"727d9472-f299-41de-83d8-3d3d73c669e2","Type":"ContainerDied","Data":"eb308450cd911f4b7b823794d6516a3f6e2c12535207e9f3d7d1f11d0fbe407f"} Dec 04 09:53:50 crc kubenswrapper[4707]: I1204 09:53:50.023486 4707 generic.go:334] "Generic (PLEG): container finished" podID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerID="d30360bad2e3b9c38a1a9fe7efdae0200614a66f1431b4ce0dee522dadff1740" exitCode=0 Dec 04 09:53:50 crc kubenswrapper[4707]: I1204 09:53:50.023514 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmv5j" event={"ID":"8fba6010-a6b6-440f-a293-a9d7a3c85dab","Type":"ContainerDied","Data":"d30360bad2e3b9c38a1a9fe7efdae0200614a66f1431b4ce0dee522dadff1740"} Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.030476 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmv5j" event={"ID":"8fba6010-a6b6-440f-a293-a9d7a3c85dab","Type":"ContainerStarted","Data":"70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb"} Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.050092 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dmv5j" podStartSLOduration=2.650074913 podStartE2EDuration="4.050068506s" podCreationTimestamp="2025-12-04 09:53:47 +0000 UTC" firstStartedPulling="2025-12-04 09:53:49.013185509 +0000 UTC m=+928.449008016" lastFinishedPulling="2025-12-04 09:53:50.413179102 +0000 UTC m=+929.849001609" observedRunningTime="2025-12-04 09:53:51.048734754 +0000 UTC m=+930.484557281" watchObservedRunningTime="2025-12-04 09:53:51.050068506 +0000 UTC m=+930.485891013" Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.650779 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.824173 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkpwb\" (UniqueName: \"kubernetes.io/projected/727d9472-f299-41de-83d8-3d3d73c669e2-kube-api-access-nkpwb\") pod \"727d9472-f299-41de-83d8-3d3d73c669e2\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.824251 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-util\") pod \"727d9472-f299-41de-83d8-3d3d73c669e2\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.824300 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-bundle\") pod \"727d9472-f299-41de-83d8-3d3d73c669e2\" (UID: \"727d9472-f299-41de-83d8-3d3d73c669e2\") " Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.825286 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-bundle" (OuterVolumeSpecName: "bundle") pod "727d9472-f299-41de-83d8-3d3d73c669e2" (UID: "727d9472-f299-41de-83d8-3d3d73c669e2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.829176 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/727d9472-f299-41de-83d8-3d3d73c669e2-kube-api-access-nkpwb" (OuterVolumeSpecName: "kube-api-access-nkpwb") pod "727d9472-f299-41de-83d8-3d3d73c669e2" (UID: "727d9472-f299-41de-83d8-3d3d73c669e2"). InnerVolumeSpecName "kube-api-access-nkpwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.840702 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-util" (OuterVolumeSpecName: "util") pod "727d9472-f299-41de-83d8-3d3d73c669e2" (UID: "727d9472-f299-41de-83d8-3d3d73c669e2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.926307 4707 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-util\") on node \"crc\" DevicePath \"\"" Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.926358 4707 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/727d9472-f299-41de-83d8-3d3d73c669e2-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:53:51 crc kubenswrapper[4707]: I1204 09:53:51.926367 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkpwb\" (UniqueName: \"kubernetes.io/projected/727d9472-f299-41de-83d8-3d3d73c669e2-kube-api-access-nkpwb\") on node \"crc\" DevicePath \"\"" Dec 04 09:53:52 crc kubenswrapper[4707]: I1204 09:53:52.037685 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" event={"ID":"727d9472-f299-41de-83d8-3d3d73c669e2","Type":"ContainerDied","Data":"7b3a943b1d9b8edf024cedf53bcd9562cda6210578a2bc92f9fce89a9af7bfbc"} Dec 04 09:53:52 crc kubenswrapper[4707]: I1204 09:53:52.037736 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b3a943b1d9b8edf024cedf53bcd9562cda6210578a2bc92f9fce89a9af7bfbc" Dec 04 09:53:52 crc kubenswrapper[4707]: I1204 09:53:52.037762 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58" Dec 04 09:53:56 crc kubenswrapper[4707]: I1204 09:53:56.948718 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:56 crc kubenswrapper[4707]: I1204 09:53:56.950145 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:56 crc kubenswrapper[4707]: I1204 09:53:56.994545 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.102781 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.852249 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/openstack-galera-0"] Dec 04 09:53:57 crc kubenswrapper[4707]: E1204 09:53:57.852499 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="727d9472-f299-41de-83d8-3d3d73c669e2" containerName="util" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.852512 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="727d9472-f299-41de-83d8-3d3d73c669e2" containerName="util" Dec 04 09:53:57 crc kubenswrapper[4707]: E1204 09:53:57.852520 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="727d9472-f299-41de-83d8-3d3d73c669e2" containerName="extract" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.852527 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="727d9472-f299-41de-83d8-3d3d73c669e2" containerName="extract" Dec 04 09:53:57 crc kubenswrapper[4707]: E1204 09:53:57.852547 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="727d9472-f299-41de-83d8-3d3d73c669e2" containerName="pull" Dec 04 09:53:57 crc 
kubenswrapper[4707]: I1204 09:53:57.852554 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="727d9472-f299-41de-83d8-3d3d73c669e2" containerName="pull" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.852678 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="727d9472-f299-41de-83d8-3d3d73c669e2" containerName="extract" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.853403 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.855061 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"openstack-config-data" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.855105 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"galera-openstack-dockercfg-t8dpj" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.855310 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"kube-root-ca.crt" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.855451 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"openshift-service-ca.crt" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.857470 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"openstack-scripts" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.862539 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-0"] Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.881925 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/openstack-galera-2"] Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.882857 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.926736 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-2"] Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.949613 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/openstack-galera-1"] Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.955485 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:57 crc kubenswrapper[4707]: I1204 09:53:57.974663 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-1"] Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.016076 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-operator-scripts\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.016232 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3b373699-1303-4b1a-914d-7764376f5b38-config-data-generated\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.016300 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpdbf\" (UniqueName: \"kubernetes.io/projected/3b373699-1303-4b1a-914d-7764376f5b38-kube-api-access-jpdbf\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.016357 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-config-data-default\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.016394 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-kolla-config\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.016529 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.016632 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-generated\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.016753 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-default\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.016913 4707 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-operator-scripts\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.016989 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kolla-config\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.017019 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.017192 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfr9n\" (UniqueName: \"kubernetes.io/projected/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kube-api-access-bfr9n\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.118524 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-kolla-config\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.118581 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.118604 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-generated\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.118772 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-generated\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.118912 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-default\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.118952 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.118988 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-default\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.118992 4707 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") device mount path \"/mnt/openstack/pv04\"" pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119015 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-operator-scripts\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119050 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kolla-config\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119079 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119149 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwbhw\" (UniqueName: \"kubernetes.io/projected/2538c764-a696-4ce4-95fa-58c782e0b71f-kube-api-access-qwbhw\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119177 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfr9n\" (UniqueName: \"kubernetes.io/projected/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kube-api-access-bfr9n\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119197 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-operator-scripts\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119231 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-operator-scripts\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119297 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-kolla-config\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119365 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3b373699-1303-4b1a-914d-7764376f5b38-config-data-generated\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119395 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-kolla-config\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119404 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpdbf\" (UniqueName: \"kubernetes.io/projected/3b373699-1303-4b1a-914d-7764376f5b38-kube-api-access-jpdbf\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119511 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-config-data-default\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119653 4707 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") device mount path \"/mnt/openstack/pv02\"" pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.119850 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kolla-config\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.120086 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3b373699-1303-4b1a-914d-7764376f5b38-config-data-generated\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.120108 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-default\") pod 
\"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.120522 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-config-data-default\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.121249 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-operator-scripts\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.121376 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-generated\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.121894 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-operator-scripts\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.136799 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.137631 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.138026 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpdbf\" (UniqueName: \"kubernetes.io/projected/3b373699-1303-4b1a-914d-7764376f5b38-kube-api-access-jpdbf\") pod \"openstack-galera-2\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.144298 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfr9n\" (UniqueName: \"kubernetes.io/projected/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kube-api-access-bfr9n\") pod \"openstack-galera-0\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.151385 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.151535 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.170314 4707 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.203309 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.220389 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-kolla-config\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.220510 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-generated\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.220548 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.220575 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-default\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.220616 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwbhw\" (UniqueName: \"kubernetes.io/projected/2538c764-a696-4ce4-95fa-58c782e0b71f-kube-api-access-qwbhw\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.220638 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-operator-scripts\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.221775 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-generated\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.222345 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-default\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.222396 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-operator-scripts\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.222594 4707 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") device mount path \"/mnt/openstack/pv09\"" pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.222613 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-kolla-config\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.230301 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.253900 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwbhw\" (UniqueName: \"kubernetes.io/projected/2538c764-a696-4ce4-95fa-58c782e0b71f-kube-api-access-qwbhw\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.254025 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-1\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.272295 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.358766 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc"] Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.360042 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.365818 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.366022 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-c6njm" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.378507 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc"] Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.525262 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-apiservice-cert\") pod \"infra-operator-controller-manager-5fcdc5dbf4-s5hkc\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.525374 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-webhook-cert\") pod \"infra-operator-controller-manager-5fcdc5dbf4-s5hkc\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.525449 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfr2j\" (UniqueName: \"kubernetes.io/projected/dee882a8-abdc-45cb-b451-121bd9579e0f-kube-api-access-rfr2j\") pod \"infra-operator-controller-manager-5fcdc5dbf4-s5hkc\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.627011 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfr2j\" (UniqueName: \"kubernetes.io/projected/dee882a8-abdc-45cb-b451-121bd9579e0f-kube-api-access-rfr2j\") pod \"infra-operator-controller-manager-5fcdc5dbf4-s5hkc\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.628186 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-apiservice-cert\") pod \"infra-operator-controller-manager-5fcdc5dbf4-s5hkc\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.628301 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-webhook-cert\") pod \"infra-operator-controller-manager-5fcdc5dbf4-s5hkc\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.640808 4707 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-webhook-cert\") pod \"infra-operator-controller-manager-5fcdc5dbf4-s5hkc\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.648476 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-apiservice-cert\") pod \"infra-operator-controller-manager-5fcdc5dbf4-s5hkc\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.655419 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfr2j\" (UniqueName: \"kubernetes.io/projected/dee882a8-abdc-45cb-b451-121bd9579e0f-kube-api-access-rfr2j\") pod \"infra-operator-controller-manager-5fcdc5dbf4-s5hkc\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.700027 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.824723 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ktmqc"] Dec 04 09:53:58 crc kubenswrapper[4707]: I1204 09:53:58.981569 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-0"] Dec 04 09:53:58 crc kubenswrapper[4707]: W1204 09:53:58.991790 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79cd0cfb_7c57_4a38_97c7_a40a24097d29.slice/crio-6cdf2aade72c69e6e1657f00835a392b30a87806a2a319286546fb49321a6b36 WatchSource:0}: Error finding container 6cdf2aade72c69e6e1657f00835a392b30a87806a2a319286546fb49321a6b36: Status 404 returned error can't find the container with id 6cdf2aade72c69e6e1657f00835a392b30a87806a2a319286546fb49321a6b36 Dec 04 09:53:59 crc kubenswrapper[4707]: I1204 09:53:59.080258 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"79cd0cfb-7c57-4a38-97c7-a40a24097d29","Type":"ContainerStarted","Data":"6cdf2aade72c69e6e1657f00835a392b30a87806a2a319286546fb49321a6b36"} Dec 04 09:53:59 crc kubenswrapper[4707]: I1204 09:53:59.081542 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ktmqc" podUID="fe797225-935d-4660-8763-8877a6eb5563" containerName="registry-server" containerID="cri-o://e3afce6b989925fec3a3fed7688293ca6fc95f3044b6f682d82c5bdb743506e4" gracePeriod=2 Dec 04 09:53:59 crc kubenswrapper[4707]: I1204 09:53:59.152892 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:53:59 crc kubenswrapper[4707]: I1204 09:53:59.208972 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-2"] Dec 04 09:53:59 crc kubenswrapper[4707]: I1204 09:53:59.348539 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-1"] Dec 04 09:53:59 crc kubenswrapper[4707]: I1204 09:53:59.506555 4707 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc"] Dec 04 09:53:59 crc kubenswrapper[4707]: W1204 09:53:59.511296 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddee882a8_abdc_45cb_b451_121bd9579e0f.slice/crio-4ea83e6e3d4d9e45a69f585fa4ddb8ed8b12f3531c5d426ec389c9c24fe6292b WatchSource:0}: Error finding container 4ea83e6e3d4d9e45a69f585fa4ddb8ed8b12f3531c5d426ec389c9c24fe6292b: Status 404 returned error can't find the container with id 4ea83e6e3d4d9e45a69f585fa4ddb8ed8b12f3531c5d426ec389c9c24fe6292b Dec 04 09:54:00 crc kubenswrapper[4707]: I1204 09:54:00.095240 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" event={"ID":"dee882a8-abdc-45cb-b451-121bd9579e0f","Type":"ContainerStarted","Data":"4ea83e6e3d4d9e45a69f585fa4ddb8ed8b12f3531c5d426ec389c9c24fe6292b"} Dec 04 09:54:00 crc kubenswrapper[4707]: I1204 09:54:00.101541 4707 generic.go:334] "Generic (PLEG): container finished" podID="fe797225-935d-4660-8763-8877a6eb5563" containerID="e3afce6b989925fec3a3fed7688293ca6fc95f3044b6f682d82c5bdb743506e4" exitCode=0 Dec 04 09:54:00 crc kubenswrapper[4707]: I1204 09:54:00.101601 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktmqc" event={"ID":"fe797225-935d-4660-8763-8877a6eb5563","Type":"ContainerDied","Data":"e3afce6b989925fec3a3fed7688293ca6fc95f3044b6f682d82c5bdb743506e4"} Dec 04 09:54:00 crc kubenswrapper[4707]: I1204 09:54:00.103517 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"3b373699-1303-4b1a-914d-7764376f5b38","Type":"ContainerStarted","Data":"81aea55eb079371032df154c113d28917c33fb500c4298c482209697d5a04eee"} Dec 04 09:54:00 crc kubenswrapper[4707]: I1204 09:54:00.104522 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"2538c764-a696-4ce4-95fa-58c782e0b71f","Type":"ContainerStarted","Data":"a8d628b48c7a97e6a71fa0bded88744fe494742b8ed739c66e73424b4529e7cb"} Dec 04 09:54:00 crc kubenswrapper[4707]: I1204 09:54:00.975639 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.077094 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-utilities\") pod \"fe797225-935d-4660-8763-8877a6eb5563\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.077216 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-catalog-content\") pod \"fe797225-935d-4660-8763-8877a6eb5563\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.077250 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5mmr\" (UniqueName: \"kubernetes.io/projected/fe797225-935d-4660-8763-8877a6eb5563-kube-api-access-n5mmr\") pod \"fe797225-935d-4660-8763-8877a6eb5563\" (UID: \"fe797225-935d-4660-8763-8877a6eb5563\") " Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.077923 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-utilities" (OuterVolumeSpecName: "utilities") pod "fe797225-935d-4660-8763-8877a6eb5563" (UID: "fe797225-935d-4660-8763-8877a6eb5563"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.093204 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe797225-935d-4660-8763-8877a6eb5563-kube-api-access-n5mmr" (OuterVolumeSpecName: "kube-api-access-n5mmr") pod "fe797225-935d-4660-8763-8877a6eb5563" (UID: "fe797225-935d-4660-8763-8877a6eb5563"). InnerVolumeSpecName "kube-api-access-n5mmr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.102906 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe797225-935d-4660-8763-8877a6eb5563" (UID: "fe797225-935d-4660-8763-8877a6eb5563"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.120864 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktmqc" event={"ID":"fe797225-935d-4660-8763-8877a6eb5563","Type":"ContainerDied","Data":"cf5241d40847b048f1513f1a5327d9304c54c94603e17b57bfdad366af619573"} Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.120926 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ktmqc" Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.120930 4707 scope.go:117] "RemoveContainer" containerID="e3afce6b989925fec3a3fed7688293ca6fc95f3044b6f682d82c5bdb743506e4" Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.154405 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ktmqc"] Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.162031 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ktmqc"] Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.179473 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.179507 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5mmr\" (UniqueName: \"kubernetes.io/projected/fe797225-935d-4660-8763-8877a6eb5563-kube-api-access-n5mmr\") on node \"crc\" DevicePath \"\"" Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.179549 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe797225-935d-4660-8763-8877a6eb5563-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.207714 4707 scope.go:117] "RemoveContainer" containerID="7b85f30fd8d660c537f0cfff7354440d2b092604b922e825f85812d93e167a87" Dec 04 09:54:01 crc kubenswrapper[4707]: I1204 09:54:01.243932 4707 scope.go:117] "RemoveContainer" containerID="abfb704be3fa7d74903c10f369d36f704790719b716dc877080357c83695010c" Dec 04 09:54:02 crc kubenswrapper[4707]: I1204 09:54:02.860120 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe797225-935d-4660-8763-8877a6eb5563" path="/var/lib/kubelet/pods/fe797225-935d-4660-8763-8877a6eb5563/volumes" Dec 04 09:54:03 crc kubenswrapper[4707]: I1204 09:54:03.036970 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dmv5j"] Dec 04 09:54:03 crc kubenswrapper[4707]: I1204 09:54:03.037644 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dmv5j" podUID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerName="registry-server" containerID="cri-o://70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb" gracePeriod=2 Dec 04 09:54:04 crc kubenswrapper[4707]: I1204 09:54:04.224921 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" event={"ID":"dee882a8-abdc-45cb-b451-121bd9579e0f","Type":"ContainerStarted","Data":"c8bbf20d55964d3101a974ead8aed6aab528464e5701ea73c2d46c21aa7b740b"} Dec 04 09:54:04 crc kubenswrapper[4707]: I1204 09:54:04.234797 4707 generic.go:334] "Generic (PLEG): container finished" podID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerID="70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb" exitCode=0 Dec 04 09:54:04 crc kubenswrapper[4707]: I1204 09:54:04.234843 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmv5j" event={"ID":"8fba6010-a6b6-440f-a293-a9d7a3c85dab","Type":"ContainerDied","Data":"70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb"} Dec 04 09:54:08 crc kubenswrapper[4707]: E1204 09:54:08.150837 4707 
log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb is running failed: container process not found" containerID="70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb" cmd=["grpc_health_probe","-addr=:50051"] Dec 04 09:54:08 crc kubenswrapper[4707]: E1204 09:54:08.151559 4707 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb is running failed: container process not found" containerID="70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb" cmd=["grpc_health_probe","-addr=:50051"] Dec 04 09:54:08 crc kubenswrapper[4707]: E1204 09:54:08.151895 4707 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb is running failed: container process not found" containerID="70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb" cmd=["grpc_health_probe","-addr=:50051"] Dec 04 09:54:08 crc kubenswrapper[4707]: E1204 09:54:08.151931 4707 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-dmv5j" podUID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerName="registry-server" Dec 04 09:54:16 crc kubenswrapper[4707]: E1204 09:54:16.720218 4707 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:d9a2d8b19d3de4658dd26a2e781d00002e937738bfe6a1d0cf6c68c015085f4a" Dec 04 09:54:16 crc kubenswrapper[4707]: E1204 09:54:16.721120 4707 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:d9a2d8b19d3de4658dd26a2e781d00002e937738bfe6a1d0cf6c68c015085f4a,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jpdbf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-2_manila-kuttl-tests(3b373699-1303-4b1a-914d-7764376f5b38): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 09:54:16 crc kubenswrapper[4707]: E1204 09:54:16.722328 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="manila-kuttl-tests/openstack-galera-2" podUID="3b373699-1303-4b1a-914d-7764376f5b38" Dec 04 09:54:16 crc kubenswrapper[4707]: I1204 09:54:16.732695 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:54:16 crc kubenswrapper[4707]: E1204 09:54:16.744306 4707 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:d9a2d8b19d3de4658dd26a2e781d00002e937738bfe6a1d0cf6c68c015085f4a" Dec 04 09:54:16 crc kubenswrapper[4707]: E1204 09:54:16.744511 4707 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:d9a2d8b19d3de4658dd26a2e781d00002e937738bfe6a1d0cf6c68c015085f4a,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qwbhw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-1_manila-kuttl-tests(2538c764-a696-4ce4-95fa-58c782e0b71f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 09:54:16 crc kubenswrapper[4707]: E1204 09:54:16.745705 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="manila-kuttl-tests/openstack-galera-1" podUID="2538c764-a696-4ce4-95fa-58c782e0b71f" Dec 04 09:54:16 crc kubenswrapper[4707]: E1204 09:54:16.774696 4707 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:d9a2d8b19d3de4658dd26a2e781d00002e937738bfe6a1d0cf6c68c015085f4a" Dec 04 09:54:16 crc kubenswrapper[4707]: E1204 
09:54:16.774872 4707 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:d9a2d8b19d3de4658dd26a2e781d00002e937738bfe6a1d0cf6c68c015085f4a,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bfr9n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_manila-kuttl-tests(79cd0cfb-7c57-4a38-97c7-a40a24097d29): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 09:54:16 crc kubenswrapper[4707]: E1204 09:54:16.776429 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="manila-kuttl-tests/openstack-galera-0" podUID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" Dec 04 09:54:16 crc kubenswrapper[4707]: I1204 09:54:16.849246 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-utilities\") pod \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " Dec 04 09:54:16 crc kubenswrapper[4707]: I1204 09:54:16.849321 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvb8f\" (UniqueName: \"kubernetes.io/projected/8fba6010-a6b6-440f-a293-a9d7a3c85dab-kube-api-access-jvb8f\") pod \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " Dec 04 09:54:16 crc kubenswrapper[4707]: I1204 09:54:16.849400 4707 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-catalog-content\") pod \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\" (UID: \"8fba6010-a6b6-440f-a293-a9d7a3c85dab\") " Dec 04 09:54:16 crc kubenswrapper[4707]: I1204 09:54:16.850657 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-utilities" (OuterVolumeSpecName: "utilities") pod "8fba6010-a6b6-440f-a293-a9d7a3c85dab" (UID: "8fba6010-a6b6-440f-a293-a9d7a3c85dab"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:54:16 crc kubenswrapper[4707]: I1204 09:54:16.858385 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fba6010-a6b6-440f-a293-a9d7a3c85dab-kube-api-access-jvb8f" (OuterVolumeSpecName: "kube-api-access-jvb8f") pod "8fba6010-a6b6-440f-a293-a9d7a3c85dab" (UID: "8fba6010-a6b6-440f-a293-a9d7a3c85dab"). InnerVolumeSpecName "kube-api-access-jvb8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:54:16 crc kubenswrapper[4707]: I1204 09:54:16.909859 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8fba6010-a6b6-440f-a293-a9d7a3c85dab" (UID: "8fba6010-a6b6-440f-a293-a9d7a3c85dab"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:54:16 crc kubenswrapper[4707]: I1204 09:54:16.950930 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 09:54:16 crc kubenswrapper[4707]: I1204 09:54:16.950968 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvb8f\" (UniqueName: \"kubernetes.io/projected/8fba6010-a6b6-440f-a293-a9d7a3c85dab-kube-api-access-jvb8f\") on node \"crc\" DevicePath \"\"" Dec 04 09:54:16 crc kubenswrapper[4707]: I1204 09:54:16.950980 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fba6010-a6b6-440f-a293-a9d7a3c85dab-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 09:54:17 crc kubenswrapper[4707]: I1204 09:54:17.361741 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmv5j" event={"ID":"8fba6010-a6b6-440f-a293-a9d7a3c85dab","Type":"ContainerDied","Data":"0bede0959a7853266e881badfe5c1c33cfa8a3f33a0fdede04075b9d55f8f513"} Dec 04 09:54:17 crc kubenswrapper[4707]: I1204 09:54:17.361798 4707 scope.go:117] "RemoveContainer" containerID="70e1a33477aa55a36850505c6901f6cc76b86d81dda6bd0fb9dab60bf2a016eb" Dec 04 09:54:17 crc kubenswrapper[4707]: I1204 09:54:17.361811 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dmv5j" Dec 04 09:54:17 crc kubenswrapper[4707]: E1204 09:54:17.364134 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:d9a2d8b19d3de4658dd26a2e781d00002e937738bfe6a1d0cf6c68c015085f4a\\\"\"" pod="manila-kuttl-tests/openstack-galera-0" podUID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" Dec 04 09:54:17 crc kubenswrapper[4707]: E1204 09:54:17.364298 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:d9a2d8b19d3de4658dd26a2e781d00002e937738bfe6a1d0cf6c68c015085f4a\\\"\"" pod="manila-kuttl-tests/openstack-galera-1" podUID="2538c764-a696-4ce4-95fa-58c782e0b71f" Dec 04 09:54:17 crc kubenswrapper[4707]: E1204 09:54:17.364174 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:d9a2d8b19d3de4658dd26a2e781d00002e937738bfe6a1d0cf6c68c015085f4a\\\"\"" pod="manila-kuttl-tests/openstack-galera-2" podUID="3b373699-1303-4b1a-914d-7764376f5b38" Dec 04 09:54:17 crc kubenswrapper[4707]: I1204 09:54:17.454648 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dmv5j"] Dec 04 09:54:17 crc kubenswrapper[4707]: I1204 09:54:17.460129 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dmv5j"] Dec 04 09:54:18 crc kubenswrapper[4707]: I1204 09:54:18.597223 4707 scope.go:117] "RemoveContainer" containerID="d30360bad2e3b9c38a1a9fe7efdae0200614a66f1431b4ce0dee522dadff1740" Dec 04 09:54:18 crc kubenswrapper[4707]: I1204 09:54:18.772622 4707 scope.go:117] "RemoveContainer" containerID="1bb56015c247cd94bb664ff903a14b9aeae43f1221b43ce0120bcc91841f196b" Dec 04 09:54:18 crc kubenswrapper[4707]: I1204 09:54:18.855640 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" path="/var/lib/kubelet/pods/8fba6010-a6b6-440f-a293-a9d7a3c85dab/volumes" Dec 04 09:54:20 crc kubenswrapper[4707]: I1204 09:54:20.384753 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" event={"ID":"dee882a8-abdc-45cb-b451-121bd9579e0f","Type":"ContainerStarted","Data":"1439692c614e0a37a6ece66472d71a5abd2d940577fd6cab4fd5836cfd138ba6"} Dec 04 09:54:20 crc kubenswrapper[4707]: I1204 09:54:20.385531 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:54:20 crc kubenswrapper[4707]: I1204 09:54:20.392524 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 09:54:20 crc kubenswrapper[4707]: I1204 09:54:20.408845 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" podStartSLOduration=2.459019488 podStartE2EDuration="22.408823603s" podCreationTimestamp="2025-12-04 09:53:58 +0000 UTC" firstStartedPulling="2025-12-04 09:53:59.5139977 +0000 UTC 
m=+938.949820207" lastFinishedPulling="2025-12-04 09:54:19.463801815 +0000 UTC m=+958.899624322" observedRunningTime="2025-12-04 09:54:20.402248345 +0000 UTC m=+959.838070862" watchObservedRunningTime="2025-12-04 09:54:20.408823603 +0000 UTC m=+959.844646110" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.473210 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/memcached-0"] Dec 04 09:54:22 crc kubenswrapper[4707]: E1204 09:54:22.474665 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe797225-935d-4660-8763-8877a6eb5563" containerName="extract-content" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.474689 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe797225-935d-4660-8763-8877a6eb5563" containerName="extract-content" Dec 04 09:54:22 crc kubenswrapper[4707]: E1204 09:54:22.474711 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe797225-935d-4660-8763-8877a6eb5563" containerName="extract-utilities" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.474720 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe797225-935d-4660-8763-8877a6eb5563" containerName="extract-utilities" Dec 04 09:54:22 crc kubenswrapper[4707]: E1204 09:54:22.474732 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerName="registry-server" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.474740 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerName="registry-server" Dec 04 09:54:22 crc kubenswrapper[4707]: E1204 09:54:22.474749 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe797225-935d-4660-8763-8877a6eb5563" containerName="registry-server" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.474755 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe797225-935d-4660-8763-8877a6eb5563" containerName="registry-server" Dec 04 09:54:22 crc kubenswrapper[4707]: E1204 09:54:22.474763 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerName="extract-utilities" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.474770 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerName="extract-utilities" Dec 04 09:54:22 crc kubenswrapper[4707]: E1204 09:54:22.474780 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerName="extract-content" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.474788 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerName="extract-content" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.474945 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe797225-935d-4660-8763-8877a6eb5563" containerName="registry-server" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.474962 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fba6010-a6b6-440f-a293-a9d7a3c85dab" containerName="registry-server" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.475537 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.479896 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"memcached-config-data" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.480148 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"memcached-memcached-dockercfg-hcgnd" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.483238 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/memcached-0"] Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.635565 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmx9d\" (UniqueName: \"kubernetes.io/projected/b38182e9-ec12-42a7-b506-83ba39b9042c-kube-api-access-cmx9d\") pod \"memcached-0\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.635642 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-config-data\") pod \"memcached-0\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.635699 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-kolla-config\") pod \"memcached-0\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.737019 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-config-data\") pod \"memcached-0\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.737115 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-kolla-config\") pod \"memcached-0\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.737197 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmx9d\" (UniqueName: \"kubernetes.io/projected/b38182e9-ec12-42a7-b506-83ba39b9042c-kube-api-access-cmx9d\") pod \"memcached-0\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.738032 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-config-data\") pod \"memcached-0\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.739089 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-kolla-config\") pod \"memcached-0\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:22 
crc kubenswrapper[4707]: I1204 09:54:22.759701 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmx9d\" (UniqueName: \"kubernetes.io/projected/b38182e9-ec12-42a7-b506-83ba39b9042c-kube-api-access-cmx9d\") pod \"memcached-0\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:22 crc kubenswrapper[4707]: I1204 09:54:22.792551 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:23 crc kubenswrapper[4707]: I1204 09:54:23.061553 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-prnzl"] Dec 04 09:54:23 crc kubenswrapper[4707]: I1204 09:54:23.064453 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" Dec 04 09:54:23 crc kubenswrapper[4707]: I1204 09:54:23.071533 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-2cvv6" Dec 04 09:54:23 crc kubenswrapper[4707]: I1204 09:54:23.076669 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-prnzl"] Dec 04 09:54:23 crc kubenswrapper[4707]: I1204 09:54:23.162063 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/memcached-0"] Dec 04 09:54:23 crc kubenswrapper[4707]: I1204 09:54:23.245806 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h76wc\" (UniqueName: \"kubernetes.io/projected/07f17250-36c1-4129-aa10-2e5c81fed559-kube-api-access-h76wc\") pod \"rabbitmq-cluster-operator-index-prnzl\" (UID: \"07f17250-36c1-4129-aa10-2e5c81fed559\") " pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" Dec 04 09:54:23 crc kubenswrapper[4707]: I1204 09:54:23.347483 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h76wc\" (UniqueName: \"kubernetes.io/projected/07f17250-36c1-4129-aa10-2e5c81fed559-kube-api-access-h76wc\") pod \"rabbitmq-cluster-operator-index-prnzl\" (UID: \"07f17250-36c1-4129-aa10-2e5c81fed559\") " pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" Dec 04 09:54:23 crc kubenswrapper[4707]: I1204 09:54:23.372989 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h76wc\" (UniqueName: \"kubernetes.io/projected/07f17250-36c1-4129-aa10-2e5c81fed559-kube-api-access-h76wc\") pod \"rabbitmq-cluster-operator-index-prnzl\" (UID: \"07f17250-36c1-4129-aa10-2e5c81fed559\") " pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" Dec 04 09:54:23 crc kubenswrapper[4707]: I1204 09:54:23.386167 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" Dec 04 09:54:23 crc kubenswrapper[4707]: I1204 09:54:23.406084 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/memcached-0" event={"ID":"b38182e9-ec12-42a7-b506-83ba39b9042c","Type":"ContainerStarted","Data":"1d945753a1305cfc30656c1ba54b1ccd619aa589826d3b6caada3575dc4f0b99"} Dec 04 09:54:23 crc kubenswrapper[4707]: I1204 09:54:23.601987 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-prnzl"] Dec 04 09:54:24 crc kubenswrapper[4707]: I1204 09:54:24.417454 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" event={"ID":"07f17250-36c1-4129-aa10-2e5c81fed559","Type":"ContainerStarted","Data":"063610d2e528440c60e5e0fe28e20c02e1db9a441c4d972befd616877ccd7bdf"} Dec 04 09:54:34 crc kubenswrapper[4707]: I1204 09:54:34.636849 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"79cd0cfb-7c57-4a38-97c7-a40a24097d29","Type":"ContainerStarted","Data":"044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413"} Dec 04 09:54:34 crc kubenswrapper[4707]: I1204 09:54:34.639094 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" event={"ID":"07f17250-36c1-4129-aa10-2e5c81fed559","Type":"ContainerStarted","Data":"0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01"} Dec 04 09:54:34 crc kubenswrapper[4707]: I1204 09:54:34.641775 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"3b373699-1303-4b1a-914d-7764376f5b38","Type":"ContainerStarted","Data":"04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32"} Dec 04 09:54:34 crc kubenswrapper[4707]: I1204 09:54:34.644007 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/memcached-0" event={"ID":"b38182e9-ec12-42a7-b506-83ba39b9042c","Type":"ContainerStarted","Data":"b5467eccc66bdc7b1b97924d1b861fa1970784955e8b414c0a057b3e1345c1ea"} Dec 04 09:54:34 crc kubenswrapper[4707]: I1204 09:54:34.644598 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:34 crc kubenswrapper[4707]: I1204 09:54:34.646244 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"2538c764-a696-4ce4-95fa-58c782e0b71f","Type":"ContainerStarted","Data":"e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e"} Dec 04 09:54:34 crc kubenswrapper[4707]: I1204 09:54:34.741769 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" podStartSLOduration=1.124600721 podStartE2EDuration="11.741750312s" podCreationTimestamp="2025-12-04 09:54:23 +0000 UTC" firstStartedPulling="2025-12-04 09:54:23.609621724 +0000 UTC m=+963.045444221" lastFinishedPulling="2025-12-04 09:54:34.226771305 +0000 UTC m=+973.662593812" observedRunningTime="2025-12-04 09:54:34.73825437 +0000 UTC m=+974.174076877" watchObservedRunningTime="2025-12-04 09:54:34.741750312 +0000 UTC m=+974.177572839" Dec 04 09:54:34 crc kubenswrapper[4707]: I1204 09:54:34.801898 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/memcached-0" podStartSLOduration=1.820346158 podStartE2EDuration="12.801873868s" 
podCreationTimestamp="2025-12-04 09:54:22 +0000 UTC" firstStartedPulling="2025-12-04 09:54:23.17007318 +0000 UTC m=+962.605895687" lastFinishedPulling="2025-12-04 09:54:34.15160089 +0000 UTC m=+973.587423397" observedRunningTime="2025-12-04 09:54:34.785463278 +0000 UTC m=+974.221285855" watchObservedRunningTime="2025-12-04 09:54:34.801873868 +0000 UTC m=+974.237696375" Dec 04 09:54:38 crc kubenswrapper[4707]: I1204 09:54:38.673947 4707 generic.go:334] "Generic (PLEG): container finished" podID="3b373699-1303-4b1a-914d-7764376f5b38" containerID="04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32" exitCode=0 Dec 04 09:54:38 crc kubenswrapper[4707]: I1204 09:54:38.674132 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"3b373699-1303-4b1a-914d-7764376f5b38","Type":"ContainerDied","Data":"04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32"} Dec 04 09:54:38 crc kubenswrapper[4707]: I1204 09:54:38.677952 4707 generic.go:334] "Generic (PLEG): container finished" podID="2538c764-a696-4ce4-95fa-58c782e0b71f" containerID="e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e" exitCode=0 Dec 04 09:54:38 crc kubenswrapper[4707]: I1204 09:54:38.678105 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"2538c764-a696-4ce4-95fa-58c782e0b71f","Type":"ContainerDied","Data":"e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e"} Dec 04 09:54:38 crc kubenswrapper[4707]: I1204 09:54:38.680546 4707 generic.go:334] "Generic (PLEG): container finished" podID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" containerID="044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413" exitCode=0 Dec 04 09:54:38 crc kubenswrapper[4707]: I1204 09:54:38.680593 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"79cd0cfb-7c57-4a38-97c7-a40a24097d29","Type":"ContainerDied","Data":"044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413"} Dec 04 09:54:39 crc kubenswrapper[4707]: I1204 09:54:39.688680 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"3b373699-1303-4b1a-914d-7764376f5b38","Type":"ContainerStarted","Data":"1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789"} Dec 04 09:54:39 crc kubenswrapper[4707]: I1204 09:54:39.690821 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"2538c764-a696-4ce4-95fa-58c782e0b71f","Type":"ContainerStarted","Data":"4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96"} Dec 04 09:54:39 crc kubenswrapper[4707]: I1204 09:54:39.692768 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"79cd0cfb-7c57-4a38-97c7-a40a24097d29","Type":"ContainerStarted","Data":"940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b"} Dec 04 09:54:39 crc kubenswrapper[4707]: I1204 09:54:39.713275 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/openstack-galera-2" podStartSLOduration=8.810221744 podStartE2EDuration="43.713252944s" podCreationTimestamp="2025-12-04 09:53:56 +0000 UTC" firstStartedPulling="2025-12-04 09:53:59.247516296 +0000 UTC m=+938.683338803" lastFinishedPulling="2025-12-04 09:54:34.150547496 +0000 UTC m=+973.586370003" observedRunningTime="2025-12-04 09:54:39.708049079 +0000 UTC m=+979.143871596" 
watchObservedRunningTime="2025-12-04 09:54:39.713252944 +0000 UTC m=+979.149075451" Dec 04 09:54:39 crc kubenswrapper[4707]: I1204 09:54:39.744041 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/openstack-galera-1" podStartSLOduration=8.95888691 podStartE2EDuration="43.74402243s" podCreationTimestamp="2025-12-04 09:53:56 +0000 UTC" firstStartedPulling="2025-12-04 09:53:59.368397541 +0000 UTC m=+938.804220058" lastFinishedPulling="2025-12-04 09:54:34.153533071 +0000 UTC m=+973.589355578" observedRunningTime="2025-12-04 09:54:39.740993853 +0000 UTC m=+979.176816350" watchObservedRunningTime="2025-12-04 09:54:39.74402243 +0000 UTC m=+979.179844937" Dec 04 09:54:39 crc kubenswrapper[4707]: I1204 09:54:39.763420 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/openstack-galera-0" podStartSLOduration=8.608510395 podStartE2EDuration="43.763320012s" podCreationTimestamp="2025-12-04 09:53:56 +0000 UTC" firstStartedPulling="2025-12-04 09:53:58.996724721 +0000 UTC m=+938.432547218" lastFinishedPulling="2025-12-04 09:54:34.151534328 +0000 UTC m=+973.587356835" observedRunningTime="2025-12-04 09:54:39.758314233 +0000 UTC m=+979.194136750" watchObservedRunningTime="2025-12-04 09:54:39.763320012 +0000 UTC m=+979.199142519" Dec 04 09:54:42 crc kubenswrapper[4707]: I1204 09:54:42.794195 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/memcached-0" Dec 04 09:54:43 crc kubenswrapper[4707]: I1204 09:54:43.386442 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" Dec 04 09:54:43 crc kubenswrapper[4707]: I1204 09:54:43.386494 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" Dec 04 09:54:43 crc kubenswrapper[4707]: I1204 09:54:43.415471 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" Dec 04 09:54:43 crc kubenswrapper[4707]: I1204 09:54:43.744297 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" Dec 04 09:54:48 crc kubenswrapper[4707]: I1204 09:54:48.172071 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:54:48 crc kubenswrapper[4707]: I1204 09:54:48.172744 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:54:48 crc kubenswrapper[4707]: I1204 09:54:48.231072 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:54:48 crc kubenswrapper[4707]: I1204 09:54:48.231395 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:54:48 crc kubenswrapper[4707]: I1204 09:54:48.273678 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:54:48 crc kubenswrapper[4707]: I1204 09:54:48.273737 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:54:48 crc kubenswrapper[4707]: I1204 09:54:48.296066 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:54:48 crc kubenswrapper[4707]: I1204 09:54:48.807092 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/openstack-galera-2" Dec 04 09:54:50 crc kubenswrapper[4707]: I1204 09:54:50.932688 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9"] Dec 04 09:54:50 crc kubenswrapper[4707]: I1204 09:54:50.934196 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:50 crc kubenswrapper[4707]: I1204 09:54:50.936324 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-zklcg" Dec 04 09:54:50 crc kubenswrapper[4707]: I1204 09:54:50.955062 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9"] Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.018537 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.018598 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnnqj\" (UniqueName: \"kubernetes.io/projected/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-kube-api-access-xnnqj\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.018629 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.120122 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.120170 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnnqj\" (UniqueName: \"kubernetes.io/projected/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-kube-api-access-xnnqj\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.120200 4707 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.120814 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.120830 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.140762 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnnqj\" (UniqueName: \"kubernetes.io/projected/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-kube-api-access-xnnqj\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.250151 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.691130 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9"] Dec 04 09:54:51 crc kubenswrapper[4707]: W1204 09:54:51.702493 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50e8161a_5e0d_4468_a1b8_de0e9d48bcee.slice/crio-64e2d613162059762a2d931389d7669d0566097ac545abd7f54453741a411069 WatchSource:0}: Error finding container 64e2d613162059762a2d931389d7669d0566097ac545abd7f54453741a411069: Status 404 returned error can't find the container with id 64e2d613162059762a2d931389d7669d0566097ac545abd7f54453741a411069 Dec 04 09:54:51 crc kubenswrapper[4707]: I1204 09:54:51.759711 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" event={"ID":"50e8161a-5e0d-4468-a1b8-de0e9d48bcee","Type":"ContainerStarted","Data":"64e2d613162059762a2d931389d7669d0566097ac545abd7f54453741a411069"} Dec 04 09:54:53 crc kubenswrapper[4707]: I1204 09:54:53.769931 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" event={"ID":"50e8161a-5e0d-4468-a1b8-de0e9d48bcee","Type":"ContainerStarted","Data":"f2a062a845f6899105a1d1e8c1b827e95733dec4f29b0fd82090c587a47cac36"} Dec 04 09:54:54 crc kubenswrapper[4707]: I1204 09:54:54.776030 4707 generic.go:334] "Generic (PLEG): container finished" podID="50e8161a-5e0d-4468-a1b8-de0e9d48bcee" containerID="f2a062a845f6899105a1d1e8c1b827e95733dec4f29b0fd82090c587a47cac36" exitCode=0 Dec 04 09:54:54 crc kubenswrapper[4707]: I1204 09:54:54.776080 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" event={"ID":"50e8161a-5e0d-4468-a1b8-de0e9d48bcee","Type":"ContainerDied","Data":"f2a062a845f6899105a1d1e8c1b827e95733dec4f29b0fd82090c587a47cac36"} Dec 04 09:54:56 crc kubenswrapper[4707]: I1204 09:54:56.800350 4707 generic.go:334] "Generic (PLEG): container finished" podID="50e8161a-5e0d-4468-a1b8-de0e9d48bcee" containerID="0300df74318d501be1b33daefb1051812504535f8956dc6b9a249f69f0ca529b" exitCode=0 Dec 04 09:54:56 crc kubenswrapper[4707]: I1204 09:54:56.800449 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" event={"ID":"50e8161a-5e0d-4468-a1b8-de0e9d48bcee","Type":"ContainerDied","Data":"0300df74318d501be1b33daefb1051812504535f8956dc6b9a249f69f0ca529b"} Dec 04 09:54:57 crc kubenswrapper[4707]: I1204 09:54:57.807529 4707 generic.go:334] "Generic (PLEG): container finished" podID="50e8161a-5e0d-4468-a1b8-de0e9d48bcee" containerID="15763f03267645202b4ca0e478c0c888d3a67cb74adfe0438694c8104554fab9" exitCode=0 Dec 04 09:54:57 crc kubenswrapper[4707]: I1204 09:54:57.807609 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" event={"ID":"50e8161a-5e0d-4468-a1b8-de0e9d48bcee","Type":"ContainerDied","Data":"15763f03267645202b4ca0e478c0c888d3a67cb74adfe0438694c8104554fab9"} Dec 04 09:54:58 crc kubenswrapper[4707]: I1204 09:54:58.291169 4707 prober.go:107] "Probe failed" probeType="Readiness" 
pod="manila-kuttl-tests/openstack-galera-2" podUID="3b373699-1303-4b1a-914d-7764376f5b38" containerName="galera" probeResult="failure" output=< Dec 04 09:54:58 crc kubenswrapper[4707]: wsrep_local_state_comment (Donor/Desynced) differs from Synced Dec 04 09:54:58 crc kubenswrapper[4707]: > Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.330856 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.434737 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-util\") pod \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.434870 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-bundle\") pod \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.434907 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnnqj\" (UniqueName: \"kubernetes.io/projected/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-kube-api-access-xnnqj\") pod \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\" (UID: \"50e8161a-5e0d-4468-a1b8-de0e9d48bcee\") " Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.435711 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-bundle" (OuterVolumeSpecName: "bundle") pod "50e8161a-5e0d-4468-a1b8-de0e9d48bcee" (UID: "50e8161a-5e0d-4468-a1b8-de0e9d48bcee"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.440630 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-kube-api-access-xnnqj" (OuterVolumeSpecName: "kube-api-access-xnnqj") pod "50e8161a-5e0d-4468-a1b8-de0e9d48bcee" (UID: "50e8161a-5e0d-4468-a1b8-de0e9d48bcee"). InnerVolumeSpecName "kube-api-access-xnnqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.446207 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-util" (OuterVolumeSpecName: "util") pod "50e8161a-5e0d-4468-a1b8-de0e9d48bcee" (UID: "50e8161a-5e0d-4468-a1b8-de0e9d48bcee"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.536071 4707 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-util\") on node \"crc\" DevicePath \"\"" Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.536105 4707 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.536114 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnnqj\" (UniqueName: \"kubernetes.io/projected/50e8161a-5e0d-4468-a1b8-de0e9d48bcee-kube-api-access-xnnqj\") on node \"crc\" DevicePath \"\"" Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.822607 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" event={"ID":"50e8161a-5e0d-4468-a1b8-de0e9d48bcee","Type":"ContainerDied","Data":"64e2d613162059762a2d931389d7669d0566097ac545abd7f54453741a411069"} Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.822646 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64e2d613162059762a2d931389d7669d0566097ac545abd7f54453741a411069" Dec 04 09:54:59 crc kubenswrapper[4707]: I1204 09:54:59.822662 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9" Dec 04 09:55:00 crc kubenswrapper[4707]: I1204 09:55:00.817288 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:55:00 crc kubenswrapper[4707]: I1204 09:55:00.817595 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:55:01 crc kubenswrapper[4707]: I1204 09:55:01.318032 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:55:01 crc kubenswrapper[4707]: I1204 09:55:01.387740 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/openstack-galera-0" Dec 04 09:55:03 crc kubenswrapper[4707]: I1204 09:55:03.350314 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:55:03 crc kubenswrapper[4707]: I1204 09:55:03.417249 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/openstack-galera-1" Dec 04 09:55:05 crc kubenswrapper[4707]: I1204 09:55:05.974084 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb"] Dec 04 09:55:05 crc kubenswrapper[4707]: E1204 09:55:05.974689 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50e8161a-5e0d-4468-a1b8-de0e9d48bcee" containerName="pull" Dec 04 09:55:05 crc kubenswrapper[4707]: I1204 
09:55:05.974714 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="50e8161a-5e0d-4468-a1b8-de0e9d48bcee" containerName="pull" Dec 04 09:55:05 crc kubenswrapper[4707]: E1204 09:55:05.974735 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50e8161a-5e0d-4468-a1b8-de0e9d48bcee" containerName="util" Dec 04 09:55:05 crc kubenswrapper[4707]: I1204 09:55:05.974743 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="50e8161a-5e0d-4468-a1b8-de0e9d48bcee" containerName="util" Dec 04 09:55:05 crc kubenswrapper[4707]: E1204 09:55:05.974768 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50e8161a-5e0d-4468-a1b8-de0e9d48bcee" containerName="extract" Dec 04 09:55:05 crc kubenswrapper[4707]: I1204 09:55:05.974777 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="50e8161a-5e0d-4468-a1b8-de0e9d48bcee" containerName="extract" Dec 04 09:55:05 crc kubenswrapper[4707]: I1204 09:55:05.974949 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="50e8161a-5e0d-4468-a1b8-de0e9d48bcee" containerName="extract" Dec 04 09:55:05 crc kubenswrapper[4707]: I1204 09:55:05.975932 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" Dec 04 09:55:05 crc kubenswrapper[4707]: I1204 09:55:05.978835 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-fz9hg" Dec 04 09:55:05 crc kubenswrapper[4707]: I1204 09:55:05.982037 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb"] Dec 04 09:55:06 crc kubenswrapper[4707]: I1204 09:55:06.090908 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhrdp\" (UniqueName: \"kubernetes.io/projected/ff3cb35d-0e1e-41a5-ba41-efc60015c860-kube-api-access-bhrdp\") pod \"rabbitmq-cluster-operator-779fc9694b-twnpb\" (UID: \"ff3cb35d-0e1e-41a5-ba41-efc60015c860\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" Dec 04 09:55:06 crc kubenswrapper[4707]: I1204 09:55:06.192395 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhrdp\" (UniqueName: \"kubernetes.io/projected/ff3cb35d-0e1e-41a5-ba41-efc60015c860-kube-api-access-bhrdp\") pod \"rabbitmq-cluster-operator-779fc9694b-twnpb\" (UID: \"ff3cb35d-0e1e-41a5-ba41-efc60015c860\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" Dec 04 09:55:06 crc kubenswrapper[4707]: I1204 09:55:06.216368 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhrdp\" (UniqueName: \"kubernetes.io/projected/ff3cb35d-0e1e-41a5-ba41-efc60015c860-kube-api-access-bhrdp\") pod \"rabbitmq-cluster-operator-779fc9694b-twnpb\" (UID: \"ff3cb35d-0e1e-41a5-ba41-efc60015c860\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" Dec 04 09:55:06 crc kubenswrapper[4707]: I1204 09:55:06.305682 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" Dec 04 09:55:06 crc kubenswrapper[4707]: I1204 09:55:06.752045 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb"] Dec 04 09:55:06 crc kubenswrapper[4707]: I1204 09:55:06.861404 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" event={"ID":"ff3cb35d-0e1e-41a5-ba41-efc60015c860","Type":"ContainerStarted","Data":"bf646547734b53b1a6b6611d91f3be15144a3b997a2de817afd9ea9125980bc8"} Dec 04 09:55:11 crc kubenswrapper[4707]: I1204 09:55:11.926887 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" event={"ID":"ff3cb35d-0e1e-41a5-ba41-efc60015c860","Type":"ContainerStarted","Data":"1a85e44377fe4e060de58e0bf9c07bb8b45437d23f2159373edb6007b7d46edf"} Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.046323 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" podStartSLOduration=6.556930243 podStartE2EDuration="11.046307851s" podCreationTimestamp="2025-12-04 09:55:05 +0000 UTC" firstStartedPulling="2025-12-04 09:55:06.755405583 +0000 UTC m=+1006.191228090" lastFinishedPulling="2025-12-04 09:55:11.244783191 +0000 UTC m=+1010.680605698" observedRunningTime="2025-12-04 09:55:11.947784012 +0000 UTC m=+1011.383606519" watchObservedRunningTime="2025-12-04 09:55:16.046307851 +0000 UTC m=+1015.482130358" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.046900 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.047867 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.050050 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"rabbitmq-default-user" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.050062 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"rabbitmq-erlang-cookie" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.050114 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"rabbitmq-server-conf" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.050776 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"rabbitmq-plugins-conf" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.050827 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"rabbitmq-server-dockercfg-42dmd" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.065893 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.207320 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.207430 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x4l6\" (UniqueName: \"kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-kube-api-access-2x4l6\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.207495 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.207529 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.207563 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.207742 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/51dd2aae-c620-4d95-b261-1cb6065096e3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " 
pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.207791 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/51dd2aae-c620-4d95-b261-1cb6065096e3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.207927 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/51dd2aae-c620-4d95-b261-1cb6065096e3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.308909 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/51dd2aae-c620-4d95-b261-1cb6065096e3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.308993 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.309022 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x4l6\" (UniqueName: \"kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-kube-api-access-2x4l6\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.309056 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.309074 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.309097 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.309127 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/51dd2aae-c620-4d95-b261-1cb6065096e3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 
09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.309148 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/51dd2aae-c620-4d95-b261-1cb6065096e3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.310144 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.310411 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.316016 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.320675 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/51dd2aae-c620-4d95-b261-1cb6065096e3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.358438 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/51dd2aae-c620-4d95-b261-1cb6065096e3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.362120 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/51dd2aae-c620-4d95-b261-1cb6065096e3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.365085 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x4l6\" (UniqueName: \"kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-kube-api-access-2x4l6\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.391239 4707 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.391312 4707 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c351ee343d834061e35d108d8da0071e518235427308aa20522ef469a38b4ec5/globalmount\"" pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.416082 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\") pod \"rabbitmq-server-0\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:16 crc kubenswrapper[4707]: I1204 09:55:16.665869 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:55:17 crc kubenswrapper[4707]: I1204 09:55:17.221011 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Dec 04 09:55:17 crc kubenswrapper[4707]: W1204 09:55:17.227703 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51dd2aae_c620_4d95_b261_1cb6065096e3.slice/crio-65d6e14e5ee811d24dde0c4f8e9aa27cfa046301d6e085a0c5591e0b1983ba39 WatchSource:0}: Error finding container 65d6e14e5ee811d24dde0c4f8e9aa27cfa046301d6e085a0c5591e0b1983ba39: Status 404 returned error can't find the container with id 65d6e14e5ee811d24dde0c4f8e9aa27cfa046301d6e085a0c5591e0b1983ba39 Dec 04 09:55:17 crc kubenswrapper[4707]: I1204 09:55:17.655516 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-8xfpv"] Dec 04 09:55:17 crc kubenswrapper[4707]: I1204 09:55:17.656268 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-8xfpv" Dec 04 09:55:17 crc kubenswrapper[4707]: I1204 09:55:17.658288 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-bcps9" Dec 04 09:55:17 crc kubenswrapper[4707]: I1204 09:55:17.664453 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-8xfpv"] Dec 04 09:55:17 crc kubenswrapper[4707]: I1204 09:55:17.832599 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kssh\" (UniqueName: \"kubernetes.io/projected/1faad1a1-281e-4341-8bef-0a5b6c8051e4-kube-api-access-6kssh\") pod \"keystone-operator-index-8xfpv\" (UID: \"1faad1a1-281e-4341-8bef-0a5b6c8051e4\") " pod="openstack-operators/keystone-operator-index-8xfpv" Dec 04 09:55:17 crc kubenswrapper[4707]: I1204 09:55:17.934321 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kssh\" (UniqueName: \"kubernetes.io/projected/1faad1a1-281e-4341-8bef-0a5b6c8051e4-kube-api-access-6kssh\") pod \"keystone-operator-index-8xfpv\" (UID: \"1faad1a1-281e-4341-8bef-0a5b6c8051e4\") " pod="openstack-operators/keystone-operator-index-8xfpv" Dec 04 09:55:17 crc kubenswrapper[4707]: I1204 09:55:17.954563 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kssh\" (UniqueName: \"kubernetes.io/projected/1faad1a1-281e-4341-8bef-0a5b6c8051e4-kube-api-access-6kssh\") pod \"keystone-operator-index-8xfpv\" (UID: \"1faad1a1-281e-4341-8bef-0a5b6c8051e4\") " pod="openstack-operators/keystone-operator-index-8xfpv" Dec 04 09:55:17 crc kubenswrapper[4707]: I1204 09:55:17.968988 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"51dd2aae-c620-4d95-b261-1cb6065096e3","Type":"ContainerStarted","Data":"65d6e14e5ee811d24dde0c4f8e9aa27cfa046301d6e085a0c5591e0b1983ba39"} Dec 04 09:55:18 crc kubenswrapper[4707]: I1204 09:55:18.017269 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-8xfpv" Dec 04 09:55:18 crc kubenswrapper[4707]: I1204 09:55:18.918636 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-8xfpv"] Dec 04 09:55:18 crc kubenswrapper[4707]: W1204 09:55:18.929082 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1faad1a1_281e_4341_8bef_0a5b6c8051e4.slice/crio-247031a37e85137d006b33f0a16a36e0c2638c89b591130813be302ebd45a153 WatchSource:0}: Error finding container 247031a37e85137d006b33f0a16a36e0c2638c89b591130813be302ebd45a153: Status 404 returned error can't find the container with id 247031a37e85137d006b33f0a16a36e0c2638c89b591130813be302ebd45a153 Dec 04 09:55:18 crc kubenswrapper[4707]: I1204 09:55:18.981668 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-8xfpv" event={"ID":"1faad1a1-281e-4341-8bef-0a5b6c8051e4","Type":"ContainerStarted","Data":"247031a37e85137d006b33f0a16a36e0c2638c89b591130813be302ebd45a153"} Dec 04 09:55:27 crc kubenswrapper[4707]: I1204 09:55:27.039690 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-8xfpv" event={"ID":"1faad1a1-281e-4341-8bef-0a5b6c8051e4","Type":"ContainerStarted","Data":"df2830c5c10ccd2e82c473f0e278f66af891755ea78df7a2e18f3b8b3e6d000e"} Dec 04 09:55:27 crc kubenswrapper[4707]: I1204 09:55:27.061979 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-8xfpv" podStartSLOduration=2.998933262 podStartE2EDuration="10.061953265s" podCreationTimestamp="2025-12-04 09:55:17 +0000 UTC" firstStartedPulling="2025-12-04 09:55:18.93276379 +0000 UTC m=+1018.368586297" lastFinishedPulling="2025-12-04 09:55:25.995783783 +0000 UTC m=+1025.431606300" observedRunningTime="2025-12-04 09:55:27.055979505 +0000 UTC m=+1026.491802002" watchObservedRunningTime="2025-12-04 09:55:27.061953265 +0000 UTC m=+1026.497775772" Dec 04 09:55:28 crc kubenswrapper[4707]: I1204 09:55:28.018754 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-8xfpv" Dec 04 09:55:28 crc kubenswrapper[4707]: I1204 09:55:28.018894 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-8xfpv" Dec 04 09:55:28 crc kubenswrapper[4707]: I1204 09:55:28.048735 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"51dd2aae-c620-4d95-b261-1cb6065096e3","Type":"ContainerStarted","Data":"3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829"} Dec 04 09:55:28 crc kubenswrapper[4707]: I1204 09:55:28.055975 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-8xfpv" Dec 04 09:55:30 crc kubenswrapper[4707]: I1204 09:55:30.817167 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:55:30 crc kubenswrapper[4707]: I1204 09:55:30.817495 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" 
podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:55:38 crc kubenswrapper[4707]: I1204 09:55:38.043554 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-8xfpv" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.317388 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527"] Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.320146 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.331527 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527"] Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.334991 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-zklcg" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.418992 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-bundle\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.419123 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-util\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.419156 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9zrb\" (UniqueName: \"kubernetes.io/projected/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-kube-api-access-l9zrb\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.521181 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-util\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.521244 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9zrb\" (UniqueName: \"kubernetes.io/projected/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-kube-api-access-l9zrb\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " 
pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.521397 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-bundle\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.521840 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-util\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.522001 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-bundle\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.551380 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9zrb\" (UniqueName: \"kubernetes.io/projected/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-kube-api-access-l9zrb\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:49 crc kubenswrapper[4707]: I1204 09:55:49.640165 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:50 crc kubenswrapper[4707]: I1204 09:55:50.101513 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527"] Dec 04 09:55:50 crc kubenswrapper[4707]: I1204 09:55:50.186111 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" event={"ID":"437bf12b-06d2-4fed-a6ff-c5b65eea01fe","Type":"ContainerStarted","Data":"018d7ab1f9af13ec64f68e63d870f6530a1e553a7b4d7fd05c556e4f314a63c1"} Dec 04 09:55:51 crc kubenswrapper[4707]: I1204 09:55:51.194389 4707 generic.go:334] "Generic (PLEG): container finished" podID="437bf12b-06d2-4fed-a6ff-c5b65eea01fe" containerID="d7b3c47c22932b48c16ad0512e043b70a26c15533d71f03899ceee0d1114b255" exitCode=0 Dec 04 09:55:51 crc kubenswrapper[4707]: I1204 09:55:51.194474 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" event={"ID":"437bf12b-06d2-4fed-a6ff-c5b65eea01fe","Type":"ContainerDied","Data":"d7b3c47c22932b48c16ad0512e043b70a26c15533d71f03899ceee0d1114b255"} Dec 04 09:55:52 crc kubenswrapper[4707]: I1204 09:55:52.201686 4707 generic.go:334] "Generic (PLEG): container finished" podID="437bf12b-06d2-4fed-a6ff-c5b65eea01fe" containerID="f457bb18a9a6958560e780e29f57edb2b9dc4864778e9bf00c426dd5094be93a" exitCode=0 Dec 04 09:55:52 crc kubenswrapper[4707]: I1204 09:55:52.201815 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" event={"ID":"437bf12b-06d2-4fed-a6ff-c5b65eea01fe","Type":"ContainerDied","Data":"f457bb18a9a6958560e780e29f57edb2b9dc4864778e9bf00c426dd5094be93a"} Dec 04 09:55:53 crc kubenswrapper[4707]: I1204 09:55:53.209301 4707 generic.go:334] "Generic (PLEG): container finished" podID="437bf12b-06d2-4fed-a6ff-c5b65eea01fe" containerID="9d55cdc04fafa761d32f9ed44f241091b5c5bae804a28d5639a52bf91beaa44b" exitCode=0 Dec 04 09:55:53 crc kubenswrapper[4707]: I1204 09:55:53.209357 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" event={"ID":"437bf12b-06d2-4fed-a6ff-c5b65eea01fe","Type":"ContainerDied","Data":"9d55cdc04fafa761d32f9ed44f241091b5c5bae804a28d5639a52bf91beaa44b"} Dec 04 09:55:54 crc kubenswrapper[4707]: I1204 09:55:54.473212 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:54 crc kubenswrapper[4707]: I1204 09:55:54.587927 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-bundle\") pod \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " Dec 04 09:55:54 crc kubenswrapper[4707]: I1204 09:55:54.588011 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-util\") pod \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " Dec 04 09:55:54 crc kubenswrapper[4707]: I1204 09:55:54.588042 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9zrb\" (UniqueName: \"kubernetes.io/projected/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-kube-api-access-l9zrb\") pod \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\" (UID: \"437bf12b-06d2-4fed-a6ff-c5b65eea01fe\") " Dec 04 09:55:54 crc kubenswrapper[4707]: I1204 09:55:54.588968 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-bundle" (OuterVolumeSpecName: "bundle") pod "437bf12b-06d2-4fed-a6ff-c5b65eea01fe" (UID: "437bf12b-06d2-4fed-a6ff-c5b65eea01fe"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:55:54 crc kubenswrapper[4707]: I1204 09:55:54.600592 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-kube-api-access-l9zrb" (OuterVolumeSpecName: "kube-api-access-l9zrb") pod "437bf12b-06d2-4fed-a6ff-c5b65eea01fe" (UID: "437bf12b-06d2-4fed-a6ff-c5b65eea01fe"). InnerVolumeSpecName "kube-api-access-l9zrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:55:54 crc kubenswrapper[4707]: I1204 09:55:54.604260 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-util" (OuterVolumeSpecName: "util") pod "437bf12b-06d2-4fed-a6ff-c5b65eea01fe" (UID: "437bf12b-06d2-4fed-a6ff-c5b65eea01fe"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:55:54 crc kubenswrapper[4707]: I1204 09:55:54.690182 4707 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:55:54 crc kubenswrapper[4707]: I1204 09:55:54.690230 4707 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-util\") on node \"crc\" DevicePath \"\"" Dec 04 09:55:54 crc kubenswrapper[4707]: I1204 09:55:54.690243 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9zrb\" (UniqueName: \"kubernetes.io/projected/437bf12b-06d2-4fed-a6ff-c5b65eea01fe-kube-api-access-l9zrb\") on node \"crc\" DevicePath \"\"" Dec 04 09:55:55 crc kubenswrapper[4707]: I1204 09:55:55.222865 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" event={"ID":"437bf12b-06d2-4fed-a6ff-c5b65eea01fe","Type":"ContainerDied","Data":"018d7ab1f9af13ec64f68e63d870f6530a1e553a7b4d7fd05c556e4f314a63c1"} Dec 04 09:55:55 crc kubenswrapper[4707]: I1204 09:55:55.223172 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="018d7ab1f9af13ec64f68e63d870f6530a1e553a7b4d7fd05c556e4f314a63c1" Dec 04 09:55:55 crc kubenswrapper[4707]: I1204 09:55:55.223266 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527" Dec 04 09:55:59 crc kubenswrapper[4707]: I1204 09:55:59.249058 4707 generic.go:334] "Generic (PLEG): container finished" podID="51dd2aae-c620-4d95-b261-1cb6065096e3" containerID="3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829" exitCode=0 Dec 04 09:55:59 crc kubenswrapper[4707]: I1204 09:55:59.249191 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"51dd2aae-c620-4d95-b261-1cb6065096e3","Type":"ContainerDied","Data":"3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829"} Dec 04 09:56:00 crc kubenswrapper[4707]: I1204 09:56:00.256194 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"51dd2aae-c620-4d95-b261-1cb6065096e3","Type":"ContainerStarted","Data":"10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02"} Dec 04 09:56:00 crc kubenswrapper[4707]: I1204 09:56:00.256711 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:56:00 crc kubenswrapper[4707]: I1204 09:56:00.281030 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.509566566 podStartE2EDuration="45.281008796s" podCreationTimestamp="2025-12-04 09:55:15 +0000 UTC" firstStartedPulling="2025-12-04 09:55:17.230035833 +0000 UTC m=+1016.665858340" lastFinishedPulling="2025-12-04 09:55:26.001478043 +0000 UTC m=+1025.437300570" observedRunningTime="2025-12-04 09:56:00.279088925 +0000 UTC m=+1059.714911432" watchObservedRunningTime="2025-12-04 09:56:00.281008796 +0000 UTC m=+1059.716831303" Dec 04 09:56:00 crc kubenswrapper[4707]: I1204 09:56:00.817248 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:56:00 crc kubenswrapper[4707]: I1204 09:56:00.817634 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:56:00 crc kubenswrapper[4707]: I1204 09:56:00.817693 4707 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:56:00 crc kubenswrapper[4707]: I1204 09:56:00.818413 4707 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7beea26bd12c9b8a3dede9145f5d95cbd909ef83792c0ec9ae43b628c20e8918"} pod="openshift-machine-config-operator/machine-config-daemon-c244z" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 09:56:00 crc kubenswrapper[4707]: I1204 09:56:00.818482 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" containerID="cri-o://7beea26bd12c9b8a3dede9145f5d95cbd909ef83792c0ec9ae43b628c20e8918" gracePeriod=600 Dec 04 09:56:01 crc kubenswrapper[4707]: I1204 09:56:01.265635 4707 generic.go:334] "Generic (PLEG): container finished" podID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerID="7beea26bd12c9b8a3dede9145f5d95cbd909ef83792c0ec9ae43b628c20e8918" exitCode=0 Dec 04 09:56:01 crc kubenswrapper[4707]: I1204 09:56:01.265695 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerDied","Data":"7beea26bd12c9b8a3dede9145f5d95cbd909ef83792c0ec9ae43b628c20e8918"} Dec 04 09:56:01 crc kubenswrapper[4707]: I1204 09:56:01.266054 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"e65929a2443d875614d7254731d235fbb949ab647cb08abccd54722f9ef3c29b"} Dec 04 09:56:01 crc kubenswrapper[4707]: I1204 09:56:01.266076 4707 scope.go:117] "RemoveContainer" containerID="38c08b074cc460fc3513402fc630433eabee9cc90aab57117db09d7ee10fc03a" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.189665 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-68759b947-999sr"] Dec 04 09:56:10 crc kubenswrapper[4707]: E1204 09:56:10.190522 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="437bf12b-06d2-4fed-a6ff-c5b65eea01fe" containerName="util" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.190537 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="437bf12b-06d2-4fed-a6ff-c5b65eea01fe" containerName="util" Dec 04 09:56:10 crc kubenswrapper[4707]: E1204 09:56:10.190555 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="437bf12b-06d2-4fed-a6ff-c5b65eea01fe" containerName="pull" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.190562 4707 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="437bf12b-06d2-4fed-a6ff-c5b65eea01fe" containerName="pull" Dec 04 09:56:10 crc kubenswrapper[4707]: E1204 09:56:10.190578 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="437bf12b-06d2-4fed-a6ff-c5b65eea01fe" containerName="extract" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.190584 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="437bf12b-06d2-4fed-a6ff-c5b65eea01fe" containerName="extract" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.190704 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="437bf12b-06d2-4fed-a6ff-c5b65eea01fe" containerName="extract" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.191134 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.193150 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-r2srf" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.193534 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.204925 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-68759b947-999sr"] Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.301763 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-webhook-cert\") pod \"keystone-operator-controller-manager-68759b947-999sr\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.301856 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7p94\" (UniqueName: \"kubernetes.io/projected/6e85dcf5-11e1-48ad-b884-404af35dd76a-kube-api-access-b7p94\") pod \"keystone-operator-controller-manager-68759b947-999sr\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.301919 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-apiservice-cert\") pod \"keystone-operator-controller-manager-68759b947-999sr\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.403043 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-webhook-cert\") pod \"keystone-operator-controller-manager-68759b947-999sr\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.403124 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7p94\" (UniqueName: 
\"kubernetes.io/projected/6e85dcf5-11e1-48ad-b884-404af35dd76a-kube-api-access-b7p94\") pod \"keystone-operator-controller-manager-68759b947-999sr\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.403165 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-apiservice-cert\") pod \"keystone-operator-controller-manager-68759b947-999sr\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.408842 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-apiservice-cert\") pod \"keystone-operator-controller-manager-68759b947-999sr\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.409021 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-webhook-cert\") pod \"keystone-operator-controller-manager-68759b947-999sr\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.422430 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7p94\" (UniqueName: \"kubernetes.io/projected/6e85dcf5-11e1-48ad-b884-404af35dd76a-kube-api-access-b7p94\") pod \"keystone-operator-controller-manager-68759b947-999sr\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:10 crc kubenswrapper[4707]: I1204 09:56:10.510639 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:11 crc kubenswrapper[4707]: I1204 09:56:11.177167 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-68759b947-999sr"] Dec 04 09:56:11 crc kubenswrapper[4707]: I1204 09:56:11.187957 4707 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 09:56:11 crc kubenswrapper[4707]: I1204 09:56:11.341101 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" event={"ID":"6e85dcf5-11e1-48ad-b884-404af35dd76a","Type":"ContainerStarted","Data":"71d2e96852e2d280458063073c8e72c5a79ae223dd3ca5d10b7a94944efc0c52"} Dec 04 09:56:16 crc kubenswrapper[4707]: I1204 09:56:16.405219 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" event={"ID":"6e85dcf5-11e1-48ad-b884-404af35dd76a","Type":"ContainerStarted","Data":"aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24"} Dec 04 09:56:16 crc kubenswrapper[4707]: I1204 09:56:16.406867 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:16 crc kubenswrapper[4707]: I1204 09:56:16.669221 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 09:56:16 crc kubenswrapper[4707]: I1204 09:56:16.690121 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" podStartSLOduration=2.682468675 podStartE2EDuration="6.690056876s" podCreationTimestamp="2025-12-04 09:56:10 +0000 UTC" firstStartedPulling="2025-12-04 09:56:11.187678055 +0000 UTC m=+1070.623500562" lastFinishedPulling="2025-12-04 09:56:15.195266256 +0000 UTC m=+1074.631088763" observedRunningTime="2025-12-04 09:56:16.430201406 +0000 UTC m=+1075.866023913" watchObservedRunningTime="2025-12-04 09:56:16.690056876 +0000 UTC m=+1076.125879383" Dec 04 09:56:20 crc kubenswrapper[4707]: I1204 09:56:20.517154 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.201275 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone-db-create-pbqch"] Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.202765 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-db-create-pbqch" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.215977 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone-3277-account-create-update-qv7m7"] Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.217534 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.220959 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-db-create-pbqch"] Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.221356 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-db-secret" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.241672 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-3277-account-create-update-qv7m7"] Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.308181 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzvcp\" (UniqueName: \"kubernetes.io/projected/7506a3de-2989-488c-8f6d-f3c566d0c682-kube-api-access-gzvcp\") pod \"keystone-3277-account-create-update-qv7m7\" (UID: \"7506a3de-2989-488c-8f6d-f3c566d0c682\") " pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.308317 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59418610-f02f-4d29-b520-6fccfadad63e-operator-scripts\") pod \"keystone-db-create-pbqch\" (UID: \"59418610-f02f-4d29-b520-6fccfadad63e\") " pod="manila-kuttl-tests/keystone-db-create-pbqch" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.308378 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvztm\" (UniqueName: \"kubernetes.io/projected/59418610-f02f-4d29-b520-6fccfadad63e-kube-api-access-qvztm\") pod \"keystone-db-create-pbqch\" (UID: \"59418610-f02f-4d29-b520-6fccfadad63e\") " pod="manila-kuttl-tests/keystone-db-create-pbqch" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.308443 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7506a3de-2989-488c-8f6d-f3c566d0c682-operator-scripts\") pod \"keystone-3277-account-create-update-qv7m7\" (UID: \"7506a3de-2989-488c-8f6d-f3c566d0c682\") " pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.409530 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzvcp\" (UniqueName: \"kubernetes.io/projected/7506a3de-2989-488c-8f6d-f3c566d0c682-kube-api-access-gzvcp\") pod \"keystone-3277-account-create-update-qv7m7\" (UID: \"7506a3de-2989-488c-8f6d-f3c566d0c682\") " pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.409679 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59418610-f02f-4d29-b520-6fccfadad63e-operator-scripts\") pod \"keystone-db-create-pbqch\" (UID: \"59418610-f02f-4d29-b520-6fccfadad63e\") " pod="manila-kuttl-tests/keystone-db-create-pbqch" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.409729 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvztm\" (UniqueName: \"kubernetes.io/projected/59418610-f02f-4d29-b520-6fccfadad63e-kube-api-access-qvztm\") pod \"keystone-db-create-pbqch\" (UID: \"59418610-f02f-4d29-b520-6fccfadad63e\") " 
pod="manila-kuttl-tests/keystone-db-create-pbqch" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.409810 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7506a3de-2989-488c-8f6d-f3c566d0c682-operator-scripts\") pod \"keystone-3277-account-create-update-qv7m7\" (UID: \"7506a3de-2989-488c-8f6d-f3c566d0c682\") " pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.410845 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59418610-f02f-4d29-b520-6fccfadad63e-operator-scripts\") pod \"keystone-db-create-pbqch\" (UID: \"59418610-f02f-4d29-b520-6fccfadad63e\") " pod="manila-kuttl-tests/keystone-db-create-pbqch" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.410932 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7506a3de-2989-488c-8f6d-f3c566d0c682-operator-scripts\") pod \"keystone-3277-account-create-update-qv7m7\" (UID: \"7506a3de-2989-488c-8f6d-f3c566d0c682\") " pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.431283 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzvcp\" (UniqueName: \"kubernetes.io/projected/7506a3de-2989-488c-8f6d-f3c566d0c682-kube-api-access-gzvcp\") pod \"keystone-3277-account-create-update-qv7m7\" (UID: \"7506a3de-2989-488c-8f6d-f3c566d0c682\") " pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.431326 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvztm\" (UniqueName: \"kubernetes.io/projected/59418610-f02f-4d29-b520-6fccfadad63e-kube-api-access-qvztm\") pod \"keystone-db-create-pbqch\" (UID: \"59418610-f02f-4d29-b520-6fccfadad63e\") " pod="manila-kuttl-tests/keystone-db-create-pbqch" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.519431 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-db-create-pbqch" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.533466 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.601854 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/ceph"] Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.603123 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.606115 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"default-dockercfg-zzkl2" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.714032 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-run\") pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.715086 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-data\") pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.715552 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpj7t\" (UniqueName: \"kubernetes.io/projected/ae5166d7-2ccb-4e29-8066-7b355eb947cc-kube-api-access-xpj7t\") pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.715585 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-log\") pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.819301 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-log\") pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.819417 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-run\") pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.819527 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-data\") pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.819560 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpj7t\" (UniqueName: \"kubernetes.io/projected/ae5166d7-2ccb-4e29-8066-7b355eb947cc-kube-api-access-xpj7t\") pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.820242 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-log\") pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.820443 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-run\") 
pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.820632 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-data\") pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.845067 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpj7t\" (UniqueName: \"kubernetes.io/projected/ae5166d7-2ccb-4e29-8066-7b355eb947cc-kube-api-access-xpj7t\") pod \"ceph\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.859803 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-db-create-pbqch"] Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.907139 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-3277-account-create-update-qv7m7"] Dec 04 09:56:22 crc kubenswrapper[4707]: I1204 09:56:22.925515 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/ceph" Dec 04 09:56:22 crc kubenswrapper[4707]: W1204 09:56:22.949263 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae5166d7_2ccb_4e29_8066_7b355eb947cc.slice/crio-2dda0991b27aa38dae5fc8c01e5d78024648c94236ffaa22bf82d5f34018df9a WatchSource:0}: Error finding container 2dda0991b27aa38dae5fc8c01e5d78024648c94236ffaa22bf82d5f34018df9a: Status 404 returned error can't find the container with id 2dda0991b27aa38dae5fc8c01e5d78024648c94236ffaa22bf82d5f34018df9a Dec 04 09:56:23 crc kubenswrapper[4707]: I1204 09:56:23.453292 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/ceph" event={"ID":"ae5166d7-2ccb-4e29-8066-7b355eb947cc","Type":"ContainerStarted","Data":"2dda0991b27aa38dae5fc8c01e5d78024648c94236ffaa22bf82d5f34018df9a"} Dec 04 09:56:23 crc kubenswrapper[4707]: I1204 09:56:23.455205 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" event={"ID":"7506a3de-2989-488c-8f6d-f3c566d0c682","Type":"ContainerStarted","Data":"820b59889ca7d49475c508be15c8abe3794646dd56288dccd4fd83f5ad62e8a7"} Dec 04 09:56:23 crc kubenswrapper[4707]: I1204 09:56:23.457099 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-create-pbqch" event={"ID":"59418610-f02f-4d29-b520-6fccfadad63e","Type":"ContainerStarted","Data":"b2ae167ef596e033c98b23c7351d90c006c306107eb65c54d7e494280cd02083"} Dec 04 09:56:24 crc kubenswrapper[4707]: I1204 09:56:24.467304 4707 generic.go:334] "Generic (PLEG): container finished" podID="59418610-f02f-4d29-b520-6fccfadad63e" containerID="a3683d7f45d85789b8bb6a9cd1952f8450f08efb65c0b043d690541db88ff7f5" exitCode=0 Dec 04 09:56:24 crc kubenswrapper[4707]: I1204 09:56:24.467511 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-create-pbqch" event={"ID":"59418610-f02f-4d29-b520-6fccfadad63e","Type":"ContainerDied","Data":"a3683d7f45d85789b8bb6a9cd1952f8450f08efb65c0b043d690541db88ff7f5"} Dec 04 09:56:24 crc kubenswrapper[4707]: I1204 09:56:24.469695 4707 generic.go:334] "Generic (PLEG): container finished" podID="7506a3de-2989-488c-8f6d-f3c566d0c682" 
containerID="6ab8fa43ecee348c0d9630bf76404b132a3b945ccf9bb54c3cffd1b2939e95b4" exitCode=0 Dec 04 09:56:24 crc kubenswrapper[4707]: I1204 09:56:24.469726 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" event={"ID":"7506a3de-2989-488c-8f6d-f3c566d0c682","Type":"ContainerDied","Data":"6ab8fa43ecee348c0d9630bf76404b132a3b945ccf9bb54c3cffd1b2939e95b4"} Dec 04 09:56:25 crc kubenswrapper[4707]: I1204 09:56:25.900305 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" Dec 04 09:56:25 crc kubenswrapper[4707]: I1204 09:56:25.906894 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-db-create-pbqch" Dec 04 09:56:25 crc kubenswrapper[4707]: I1204 09:56:25.975808 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59418610-f02f-4d29-b520-6fccfadad63e-operator-scripts\") pod \"59418610-f02f-4d29-b520-6fccfadad63e\" (UID: \"59418610-f02f-4d29-b520-6fccfadad63e\") " Dec 04 09:56:25 crc kubenswrapper[4707]: I1204 09:56:25.975888 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7506a3de-2989-488c-8f6d-f3c566d0c682-operator-scripts\") pod \"7506a3de-2989-488c-8f6d-f3c566d0c682\" (UID: \"7506a3de-2989-488c-8f6d-f3c566d0c682\") " Dec 04 09:56:25 crc kubenswrapper[4707]: I1204 09:56:25.975939 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzvcp\" (UniqueName: \"kubernetes.io/projected/7506a3de-2989-488c-8f6d-f3c566d0c682-kube-api-access-gzvcp\") pod \"7506a3de-2989-488c-8f6d-f3c566d0c682\" (UID: \"7506a3de-2989-488c-8f6d-f3c566d0c682\") " Dec 04 09:56:25 crc kubenswrapper[4707]: I1204 09:56:25.975978 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvztm\" (UniqueName: \"kubernetes.io/projected/59418610-f02f-4d29-b520-6fccfadad63e-kube-api-access-qvztm\") pod \"59418610-f02f-4d29-b520-6fccfadad63e\" (UID: \"59418610-f02f-4d29-b520-6fccfadad63e\") " Dec 04 09:56:25 crc kubenswrapper[4707]: I1204 09:56:25.976594 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59418610-f02f-4d29-b520-6fccfadad63e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "59418610-f02f-4d29-b520-6fccfadad63e" (UID: "59418610-f02f-4d29-b520-6fccfadad63e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:56:25 crc kubenswrapper[4707]: I1204 09:56:25.976604 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7506a3de-2989-488c-8f6d-f3c566d0c682-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7506a3de-2989-488c-8f6d-f3c566d0c682" (UID: "7506a3de-2989-488c-8f6d-f3c566d0c682"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:56:25 crc kubenswrapper[4707]: I1204 09:56:25.992539 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7506a3de-2989-488c-8f6d-f3c566d0c682-kube-api-access-gzvcp" (OuterVolumeSpecName: "kube-api-access-gzvcp") pod "7506a3de-2989-488c-8f6d-f3c566d0c682" (UID: "7506a3de-2989-488c-8f6d-f3c566d0c682"). 
InnerVolumeSpecName "kube-api-access-gzvcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:56:26 crc kubenswrapper[4707]: I1204 09:56:25.995518 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59418610-f02f-4d29-b520-6fccfadad63e-kube-api-access-qvztm" (OuterVolumeSpecName: "kube-api-access-qvztm") pod "59418610-f02f-4d29-b520-6fccfadad63e" (UID: "59418610-f02f-4d29-b520-6fccfadad63e"). InnerVolumeSpecName "kube-api-access-qvztm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:56:26 crc kubenswrapper[4707]: I1204 09:56:26.078579 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59418610-f02f-4d29-b520-6fccfadad63e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 09:56:26 crc kubenswrapper[4707]: I1204 09:56:26.078633 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7506a3de-2989-488c-8f6d-f3c566d0c682-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 09:56:26 crc kubenswrapper[4707]: I1204 09:56:26.078645 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzvcp\" (UniqueName: \"kubernetes.io/projected/7506a3de-2989-488c-8f6d-f3c566d0c682-kube-api-access-gzvcp\") on node \"crc\" DevicePath \"\"" Dec 04 09:56:26 crc kubenswrapper[4707]: I1204 09:56:26.078662 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvztm\" (UniqueName: \"kubernetes.io/projected/59418610-f02f-4d29-b520-6fccfadad63e-kube-api-access-qvztm\") on node \"crc\" DevicePath \"\"" Dec 04 09:56:26 crc kubenswrapper[4707]: I1204 09:56:26.489930 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" Dec 04 09:56:26 crc kubenswrapper[4707]: I1204 09:56:26.489935 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-3277-account-create-update-qv7m7" event={"ID":"7506a3de-2989-488c-8f6d-f3c566d0c682","Type":"ContainerDied","Data":"820b59889ca7d49475c508be15c8abe3794646dd56288dccd4fd83f5ad62e8a7"} Dec 04 09:56:26 crc kubenswrapper[4707]: I1204 09:56:26.490406 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="820b59889ca7d49475c508be15c8abe3794646dd56288dccd4fd83f5ad62e8a7" Dec 04 09:56:26 crc kubenswrapper[4707]: I1204 09:56:26.491244 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-create-pbqch" event={"ID":"59418610-f02f-4d29-b520-6fccfadad63e","Type":"ContainerDied","Data":"b2ae167ef596e033c98b23c7351d90c006c306107eb65c54d7e494280cd02083"} Dec 04 09:56:26 crc kubenswrapper[4707]: I1204 09:56:26.491266 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2ae167ef596e033c98b23c7351d90c006c306107eb65c54d7e494280cd02083" Dec 04 09:56:26 crc kubenswrapper[4707]: I1204 09:56:26.491302 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-db-create-pbqch" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.793291 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone-db-sync-b4xlv"] Dec 04 09:56:27 crc kubenswrapper[4707]: E1204 09:56:27.793546 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7506a3de-2989-488c-8f6d-f3c566d0c682" containerName="mariadb-account-create-update" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.793558 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="7506a3de-2989-488c-8f6d-f3c566d0c682" containerName="mariadb-account-create-update" Dec 04 09:56:27 crc kubenswrapper[4707]: E1204 09:56:27.793571 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59418610-f02f-4d29-b520-6fccfadad63e" containerName="mariadb-database-create" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.793577 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="59418610-f02f-4d29-b520-6fccfadad63e" containerName="mariadb-database-create" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.793687 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="59418610-f02f-4d29-b520-6fccfadad63e" containerName="mariadb-database-create" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.793702 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="7506a3de-2989-488c-8f6d-f3c566d0c682" containerName="mariadb-account-create-update" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.794088 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-db-sync-b4xlv" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.798301 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-keystone-dockercfg-sg26g" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.798493 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-config-data" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.798542 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-scripts" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.800155 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.801119 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-db-sync-b4xlv"] Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.815352 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tntx\" (UniqueName: \"kubernetes.io/projected/d84e4321-66f6-4c4b-842b-ada408c4c446-kube-api-access-8tntx\") pod \"keystone-db-sync-b4xlv\" (UID: \"d84e4321-66f6-4c4b-842b-ada408c4c446\") " pod="manila-kuttl-tests/keystone-db-sync-b4xlv" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.815417 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d84e4321-66f6-4c4b-842b-ada408c4c446-config-data\") pod \"keystone-db-sync-b4xlv\" (UID: \"d84e4321-66f6-4c4b-842b-ada408c4c446\") " pod="manila-kuttl-tests/keystone-db-sync-b4xlv" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.917024 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tntx\" (UniqueName: 
\"kubernetes.io/projected/d84e4321-66f6-4c4b-842b-ada408c4c446-kube-api-access-8tntx\") pod \"keystone-db-sync-b4xlv\" (UID: \"d84e4321-66f6-4c4b-842b-ada408c4c446\") " pod="manila-kuttl-tests/keystone-db-sync-b4xlv" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.917110 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d84e4321-66f6-4c4b-842b-ada408c4c446-config-data\") pod \"keystone-db-sync-b4xlv\" (UID: \"d84e4321-66f6-4c4b-842b-ada408c4c446\") " pod="manila-kuttl-tests/keystone-db-sync-b4xlv" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.924308 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d84e4321-66f6-4c4b-842b-ada408c4c446-config-data\") pod \"keystone-db-sync-b4xlv\" (UID: \"d84e4321-66f6-4c4b-842b-ada408c4c446\") " pod="manila-kuttl-tests/keystone-db-sync-b4xlv" Dec 04 09:56:27 crc kubenswrapper[4707]: I1204 09:56:27.933917 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tntx\" (UniqueName: \"kubernetes.io/projected/d84e4321-66f6-4c4b-842b-ada408c4c446-kube-api-access-8tntx\") pod \"keystone-db-sync-b4xlv\" (UID: \"d84e4321-66f6-4c4b-842b-ada408c4c446\") " pod="manila-kuttl-tests/keystone-db-sync-b4xlv" Dec 04 09:56:28 crc kubenswrapper[4707]: I1204 09:56:28.113228 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-db-sync-b4xlv" Dec 04 09:56:30 crc kubenswrapper[4707]: I1204 09:56:30.194263 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-db-sync-b4xlv"] Dec 04 09:56:30 crc kubenswrapper[4707]: I1204 09:56:30.532647 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-sync-b4xlv" event={"ID":"d84e4321-66f6-4c4b-842b-ada408c4c446","Type":"ContainerStarted","Data":"3483fa243036049f91cae3c40c5128ed477590d78b5a76b7b5ebce94fb115a59"} Dec 04 09:56:54 crc kubenswrapper[4707]: E1204 09:56:54.809934 4707 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/ceph/demo:latest-squid" Dec 04 09:56:54 crc kubenswrapper[4707]: E1204 09:56:54.810697 4707 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:ceph,Image:quay.io/ceph/demo:latest-squid,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:MON_IP,Value:192.168.126.11,ValueFrom:nil,},EnvVar{Name:CEPH_DAEMON,Value:demo,ValueFrom:nil,},EnvVar{Name:CEPH_PUBLIC_NETWORK,Value:0.0.0.0/0,ValueFrom:nil,},EnvVar{Name:DEMO_DAEMONS,Value:osd,mds,rgw,ValueFrom:nil,},EnvVar{Name:CEPH_DEMO_UID,Value:0,ValueFrom:nil,},EnvVar{Name:RGW_NAME,Value:ceph,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:data,ReadOnly:false,MountPath:/var/lib/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log,ReadOnly:false,MountPath:/var/log/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run,ReadOnly:false,MountPath:/run/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xpj7t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceph_manila-kuttl-tests(ae5166d7-2ccb-4e29-8066-7b355eb947cc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 09:56:54 crc kubenswrapper[4707]: E1204 09:56:54.811875 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceph\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="manila-kuttl-tests/ceph" podUID="ae5166d7-2ccb-4e29-8066-7b355eb947cc" Dec 04 09:56:55 crc kubenswrapper[4707]: I1204 09:56:55.770408 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-sync-b4xlv" event={"ID":"d84e4321-66f6-4c4b-842b-ada408c4c446","Type":"ContainerStarted","Data":"24d45e78e1e066edfd4d4e2a448c7b535d8b056b02ae1bcbc750910271d2934d"} Dec 04 09:56:55 crc kubenswrapper[4707]: E1204 09:56:55.771728 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceph\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/ceph/demo:latest-squid\\\"\"" pod="manila-kuttl-tests/ceph" podUID="ae5166d7-2ccb-4e29-8066-7b355eb947cc" Dec 04 09:56:55 crc kubenswrapper[4707]: I1204 09:56:55.822727 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/keystone-db-sync-b4xlv" podStartSLOduration=4.322378706 podStartE2EDuration="28.822702756s" podCreationTimestamp="2025-12-04 09:56:27 +0000 UTC" firstStartedPulling="2025-12-04 09:56:30.205473085 +0000 UTC m=+1089.641295592" lastFinishedPulling="2025-12-04 09:56:54.705797135 +0000 UTC m=+1114.141619642" observedRunningTime="2025-12-04 09:56:55.817866573 +0000 UTC m=+1115.253689080" watchObservedRunningTime="2025-12-04 09:56:55.822702756 +0000 UTC m=+1115.258525263" Dec 04 09:56:58 crc kubenswrapper[4707]: I1204 09:56:58.789962 4707 generic.go:334] "Generic (PLEG): container finished" podID="d84e4321-66f6-4c4b-842b-ada408c4c446" containerID="24d45e78e1e066edfd4d4e2a448c7b535d8b056b02ae1bcbc750910271d2934d" exitCode=0 Dec 04 
09:56:58 crc kubenswrapper[4707]: I1204 09:56:58.790107 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-sync-b4xlv" event={"ID":"d84e4321-66f6-4c4b-842b-ada408c4c446","Type":"ContainerDied","Data":"24d45e78e1e066edfd4d4e2a448c7b535d8b056b02ae1bcbc750910271d2934d"} Dec 04 09:57:00 crc kubenswrapper[4707]: I1204 09:57:00.079748 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-db-sync-b4xlv" Dec 04 09:57:00 crc kubenswrapper[4707]: I1204 09:57:00.251280 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tntx\" (UniqueName: \"kubernetes.io/projected/d84e4321-66f6-4c4b-842b-ada408c4c446-kube-api-access-8tntx\") pod \"d84e4321-66f6-4c4b-842b-ada408c4c446\" (UID: \"d84e4321-66f6-4c4b-842b-ada408c4c446\") " Dec 04 09:57:00 crc kubenswrapper[4707]: I1204 09:57:00.251380 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d84e4321-66f6-4c4b-842b-ada408c4c446-config-data\") pod \"d84e4321-66f6-4c4b-842b-ada408c4c446\" (UID: \"d84e4321-66f6-4c4b-842b-ada408c4c446\") " Dec 04 09:57:00 crc kubenswrapper[4707]: I1204 09:57:00.256588 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d84e4321-66f6-4c4b-842b-ada408c4c446-kube-api-access-8tntx" (OuterVolumeSpecName: "kube-api-access-8tntx") pod "d84e4321-66f6-4c4b-842b-ada408c4c446" (UID: "d84e4321-66f6-4c4b-842b-ada408c4c446"). InnerVolumeSpecName "kube-api-access-8tntx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:57:00 crc kubenswrapper[4707]: I1204 09:57:00.285087 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d84e4321-66f6-4c4b-842b-ada408c4c446-config-data" (OuterVolumeSpecName: "config-data") pod "d84e4321-66f6-4c4b-842b-ada408c4c446" (UID: "d84e4321-66f6-4c4b-842b-ada408c4c446"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:57:00 crc kubenswrapper[4707]: I1204 09:57:00.353268 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tntx\" (UniqueName: \"kubernetes.io/projected/d84e4321-66f6-4c4b-842b-ada408c4c446-kube-api-access-8tntx\") on node \"crc\" DevicePath \"\"" Dec 04 09:57:00 crc kubenswrapper[4707]: I1204 09:57:00.353320 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d84e4321-66f6-4c4b-842b-ada408c4c446-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 09:57:00 crc kubenswrapper[4707]: I1204 09:57:00.815412 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-sync-b4xlv" event={"ID":"d84e4321-66f6-4c4b-842b-ada408c4c446","Type":"ContainerDied","Data":"3483fa243036049f91cae3c40c5128ed477590d78b5a76b7b5ebce94fb115a59"} Dec 04 09:57:00 crc kubenswrapper[4707]: I1204 09:57:00.815640 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3483fa243036049f91cae3c40c5128ed477590d78b5a76b7b5ebce94fb115a59" Dec 04 09:57:00 crc kubenswrapper[4707]: I1204 09:57:00.815750 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-db-sync-b4xlv" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.016369 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone-bootstrap-v9ng7"] Dec 04 09:57:01 crc kubenswrapper[4707]: E1204 09:57:01.017029 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d84e4321-66f6-4c4b-842b-ada408c4c446" containerName="keystone-db-sync" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.017042 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="d84e4321-66f6-4c4b-842b-ada408c4c446" containerName="keystone-db-sync" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.017170 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="d84e4321-66f6-4c4b-842b-ada408c4c446" containerName="keystone-db-sync" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.017628 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.023431 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-scripts" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.023505 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"osp-secret" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.023633 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-config-data" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.024186 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.025889 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-keystone-dockercfg-sg26g" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.031546 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-bootstrap-v9ng7"] Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.162615 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-fernet-keys\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.162691 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-credential-keys\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.162745 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jzfr\" (UniqueName: \"kubernetes.io/projected/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-kube-api-access-9jzfr\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.162902 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-scripts\") pod 
\"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.162998 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-config-data\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.264524 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-config-data\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.264634 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-fernet-keys\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.264679 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-credential-keys\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.264728 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jzfr\" (UniqueName: \"kubernetes.io/projected/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-kube-api-access-9jzfr\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.264780 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-scripts\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.270073 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-config-data\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.270347 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-credential-keys\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.270597 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-fernet-keys\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " 
pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.272147 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-scripts\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.286022 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jzfr\" (UniqueName: \"kubernetes.io/projected/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-kube-api-access-9jzfr\") pod \"keystone-bootstrap-v9ng7\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.337712 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.808812 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-bootstrap-v9ng7"] Dec 04 09:57:01 crc kubenswrapper[4707]: I1204 09:57:01.822827 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" event={"ID":"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d","Type":"ContainerStarted","Data":"e1875f8930e749c90eb348b234bc04633914e56078774f193fd9cc782b926b2a"} Dec 04 09:57:03 crc kubenswrapper[4707]: I1204 09:57:03.837759 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" event={"ID":"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d","Type":"ContainerStarted","Data":"9375f91b80d047dffc37d4a8d9986d6c47895fd272438cb3c2eae279d3cc3e93"} Dec 04 09:57:03 crc kubenswrapper[4707]: I1204 09:57:03.858406 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" podStartSLOduration=3.858381563 podStartE2EDuration="3.858381563s" podCreationTimestamp="2025-12-04 09:57:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:57:03.854929003 +0000 UTC m=+1123.290751510" watchObservedRunningTime="2025-12-04 09:57:03.858381563 +0000 UTC m=+1123.294204070" Dec 04 09:57:06 crc kubenswrapper[4707]: I1204 09:57:06.856816 4707 generic.go:334] "Generic (PLEG): container finished" podID="3a2f408d-5beb-4e5c-ac7f-547ea5383f7d" containerID="9375f91b80d047dffc37d4a8d9986d6c47895fd272438cb3c2eae279d3cc3e93" exitCode=0 Dec 04 09:57:06 crc kubenswrapper[4707]: I1204 09:57:06.856871 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" event={"ID":"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d","Type":"ContainerDied","Data":"9375f91b80d047dffc37d4a8d9986d6c47895fd272438cb3c2eae279d3cc3e93"} Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.169173 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.280286 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jzfr\" (UniqueName: \"kubernetes.io/projected/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-kube-api-access-9jzfr\") pod \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.280425 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-scripts\") pod \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.280515 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-fernet-keys\") pod \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.280542 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-config-data\") pod \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.280574 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-credential-keys\") pod \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\" (UID: \"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d\") " Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.286525 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-scripts" (OuterVolumeSpecName: "scripts") pod "3a2f408d-5beb-4e5c-ac7f-547ea5383f7d" (UID: "3a2f408d-5beb-4e5c-ac7f-547ea5383f7d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.287404 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-kube-api-access-9jzfr" (OuterVolumeSpecName: "kube-api-access-9jzfr") pod "3a2f408d-5beb-4e5c-ac7f-547ea5383f7d" (UID: "3a2f408d-5beb-4e5c-ac7f-547ea5383f7d"). InnerVolumeSpecName "kube-api-access-9jzfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.289061 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3a2f408d-5beb-4e5c-ac7f-547ea5383f7d" (UID: "3a2f408d-5beb-4e5c-ac7f-547ea5383f7d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.289349 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3a2f408d-5beb-4e5c-ac7f-547ea5383f7d" (UID: "3a2f408d-5beb-4e5c-ac7f-547ea5383f7d"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.300991 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-config-data" (OuterVolumeSpecName: "config-data") pod "3a2f408d-5beb-4e5c-ac7f-547ea5383f7d" (UID: "3a2f408d-5beb-4e5c-ac7f-547ea5383f7d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.382233 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jzfr\" (UniqueName: \"kubernetes.io/projected/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-kube-api-access-9jzfr\") on node \"crc\" DevicePath \"\"" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.382283 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.382296 4707 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.382307 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.382320 4707 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.873012 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" event={"ID":"3a2f408d-5beb-4e5c-ac7f-547ea5383f7d","Type":"ContainerDied","Data":"e1875f8930e749c90eb348b234bc04633914e56078774f193fd9cc782b926b2a"} Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.873070 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1875f8930e749c90eb348b234bc04633914e56078774f193fd9cc782b926b2a" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.873166 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-bootstrap-v9ng7" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.960110 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone-6b5497869-bq6qk"] Dec 04 09:57:08 crc kubenswrapper[4707]: E1204 09:57:08.960633 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a2f408d-5beb-4e5c-ac7f-547ea5383f7d" containerName="keystone-bootstrap" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.960668 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a2f408d-5beb-4e5c-ac7f-547ea5383f7d" containerName="keystone-bootstrap" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.960876 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a2f408d-5beb-4e5c-ac7f-547ea5383f7d" containerName="keystone-bootstrap" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.961704 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.967195 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-keystone-dockercfg-sg26g" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.967360 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-config-data" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.967467 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-scripts" Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.969328 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-6b5497869-bq6qk"] Dec 04 09:57:08 crc kubenswrapper[4707]: I1204 09:57:08.969966 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.092548 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-scripts\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.092625 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-credential-keys\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.092690 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-fernet-keys\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.092752 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9qg4\" (UniqueName: \"kubernetes.io/projected/37fbf63e-d62a-4ea7-b15c-c34c72aab829-kube-api-access-s9qg4\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.092808 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-config-data\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.194691 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-config-data\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.194789 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-scripts\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.194844 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-credential-keys\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.194872 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-fernet-keys\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.194902 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9qg4\" (UniqueName: \"kubernetes.io/projected/37fbf63e-d62a-4ea7-b15c-c34c72aab829-kube-api-access-s9qg4\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.200615 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-scripts\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.200930 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-credential-keys\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.203193 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-config-data\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.206086 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-fernet-keys\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.218076 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9qg4\" (UniqueName: \"kubernetes.io/projected/37fbf63e-d62a-4ea7-b15c-c34c72aab829-kube-api-access-s9qg4\") pod \"keystone-6b5497869-bq6qk\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.278542 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.711607 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-6b5497869-bq6qk"] Dec 04 09:57:09 crc kubenswrapper[4707]: I1204 09:57:09.970893 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" event={"ID":"37fbf63e-d62a-4ea7-b15c-c34c72aab829","Type":"ContainerStarted","Data":"a4f3183a4f65b026ec7e1d9b6e008b0d3f32c03d5c15f3c236411a860414c031"} Dec 04 09:57:11 crc kubenswrapper[4707]: I1204 09:57:11.983360 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" event={"ID":"37fbf63e-d62a-4ea7-b15c-c34c72aab829","Type":"ContainerStarted","Data":"8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb"} Dec 04 09:57:11 crc kubenswrapper[4707]: I1204 09:57:11.984171 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:57:12 crc kubenswrapper[4707]: I1204 09:57:12.010133 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" podStartSLOduration=4.010114362 podStartE2EDuration="4.010114362s" podCreationTimestamp="2025-12-04 09:57:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:57:12.008759829 +0000 UTC m=+1131.444582336" watchObservedRunningTime="2025-12-04 09:57:12.010114362 +0000 UTC m=+1131.445936869" Dec 04 09:57:12 crc kubenswrapper[4707]: I1204 09:57:12.990099 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/ceph" event={"ID":"ae5166d7-2ccb-4e29-8066-7b355eb947cc","Type":"ContainerStarted","Data":"eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa"} Dec 04 09:57:13 crc kubenswrapper[4707]: I1204 09:57:13.009824 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/ceph" podStartSLOduration=2.395152456 podStartE2EDuration="51.009803572s" podCreationTimestamp="2025-12-04 09:56:22 +0000 UTC" firstStartedPulling="2025-12-04 09:56:22.95275492 +0000 UTC m=+1082.388577427" lastFinishedPulling="2025-12-04 09:57:11.567406036 +0000 UTC m=+1131.003228543" observedRunningTime="2025-12-04 09:57:13.00501063 +0000 UTC m=+1132.440833137" watchObservedRunningTime="2025-12-04 09:57:13.009803572 +0000 UTC m=+1132.445626079" Dec 04 09:57:41 crc kubenswrapper[4707]: I1204 09:57:41.093954 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 09:58:21 crc kubenswrapper[4707]: I1204 09:58:21.795017 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-index-hlvgt"] Dec 04 09:58:21 crc kubenswrapper[4707]: I1204 09:58:21.796518 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-index-hlvgt" Dec 04 09:58:21 crc kubenswrapper[4707]: I1204 09:58:21.798728 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-index-dockercfg-clhgk" Dec 04 09:58:21 crc kubenswrapper[4707]: I1204 09:58:21.805871 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-index-hlvgt"] Dec 04 09:58:21 crc kubenswrapper[4707]: I1204 09:58:21.983845 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghg57\" (UniqueName: \"kubernetes.io/projected/0e6e6fc4-562a-4ca6-af63-40f7a45400cc-kube-api-access-ghg57\") pod \"manila-operator-index-hlvgt\" (UID: \"0e6e6fc4-562a-4ca6-af63-40f7a45400cc\") " pod="openstack-operators/manila-operator-index-hlvgt" Dec 04 09:58:22 crc kubenswrapper[4707]: I1204 09:58:22.085022 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghg57\" (UniqueName: \"kubernetes.io/projected/0e6e6fc4-562a-4ca6-af63-40f7a45400cc-kube-api-access-ghg57\") pod \"manila-operator-index-hlvgt\" (UID: \"0e6e6fc4-562a-4ca6-af63-40f7a45400cc\") " pod="openstack-operators/manila-operator-index-hlvgt" Dec 04 09:58:22 crc kubenswrapper[4707]: I1204 09:58:22.103840 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghg57\" (UniqueName: \"kubernetes.io/projected/0e6e6fc4-562a-4ca6-af63-40f7a45400cc-kube-api-access-ghg57\") pod \"manila-operator-index-hlvgt\" (UID: \"0e6e6fc4-562a-4ca6-af63-40f7a45400cc\") " pod="openstack-operators/manila-operator-index-hlvgt" Dec 04 09:58:22 crc kubenswrapper[4707]: I1204 09:58:22.118129 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-index-hlvgt" Dec 04 09:58:22 crc kubenswrapper[4707]: I1204 09:58:22.617874 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-index-hlvgt"] Dec 04 09:58:23 crc kubenswrapper[4707]: I1204 09:58:23.480667 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-hlvgt" event={"ID":"0e6e6fc4-562a-4ca6-af63-40f7a45400cc","Type":"ContainerStarted","Data":"a1e2e6d48e3bf7dd70dd44111f217039e8c31b59aa54c2e8a76fb31d4a3a6ad6"} Dec 04 09:58:25 crc kubenswrapper[4707]: I1204 09:58:25.496731 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-hlvgt" event={"ID":"0e6e6fc4-562a-4ca6-af63-40f7a45400cc","Type":"ContainerStarted","Data":"7032c18c1b048acf87017a94ece2a3160acaa8d1768de1c0935f19bb7975c949"} Dec 04 09:58:25 crc kubenswrapper[4707]: I1204 09:58:25.516304 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-index-hlvgt" podStartSLOduration=1.967385258 podStartE2EDuration="4.516286667s" podCreationTimestamp="2025-12-04 09:58:21 +0000 UTC" firstStartedPulling="2025-12-04 09:58:22.620090316 +0000 UTC m=+1202.055912823" lastFinishedPulling="2025-12-04 09:58:25.168991725 +0000 UTC m=+1204.604814232" observedRunningTime="2025-12-04 09:58:25.511444644 +0000 UTC m=+1204.947267151" watchObservedRunningTime="2025-12-04 09:58:25.516286667 +0000 UTC m=+1204.952109174" Dec 04 09:58:30 crc kubenswrapper[4707]: I1204 09:58:30.816644 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:58:30 crc kubenswrapper[4707]: I1204 09:58:30.817238 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:58:32 crc kubenswrapper[4707]: I1204 09:58:32.118557 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-index-hlvgt" Dec 04 09:58:32 crc kubenswrapper[4707]: I1204 09:58:32.118962 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/manila-operator-index-hlvgt" Dec 04 09:58:32 crc kubenswrapper[4707]: I1204 09:58:32.151283 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/manila-operator-index-hlvgt" Dec 04 09:58:32 crc kubenswrapper[4707]: I1204 09:58:32.564306 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-index-hlvgt" Dec 04 09:58:33 crc kubenswrapper[4707]: I1204 09:58:33.831536 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr"] Dec 04 09:58:33 crc kubenswrapper[4707]: I1204 09:58:33.833406 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:33 crc kubenswrapper[4707]: I1204 09:58:33.837650 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-zklcg" Dec 04 09:58:33 crc kubenswrapper[4707]: I1204 09:58:33.839025 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr"] Dec 04 09:58:33 crc kubenswrapper[4707]: I1204 09:58:33.948064 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnksj\" (UniqueName: \"kubernetes.io/projected/4f8839cc-9d4a-4cba-ab65-b5413391e72f-kube-api-access-qnksj\") pod \"5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:33 crc kubenswrapper[4707]: I1204 09:58:33.948201 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-bundle\") pod \"5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:33 crc kubenswrapper[4707]: I1204 09:58:33.948250 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-util\") pod \"5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:34 crc kubenswrapper[4707]: I1204 09:58:34.049771 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnksj\" (UniqueName: \"kubernetes.io/projected/4f8839cc-9d4a-4cba-ab65-b5413391e72f-kube-api-access-qnksj\") pod \"5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:34 crc kubenswrapper[4707]: I1204 09:58:34.049871 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-bundle\") pod \"5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:34 crc kubenswrapper[4707]: I1204 09:58:34.049913 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-util\") pod \"5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:34 crc kubenswrapper[4707]: I1204 09:58:34.050481 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-util\") pod \"5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:34 crc kubenswrapper[4707]: I1204 09:58:34.050970 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-bundle\") pod \"5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:34 crc kubenswrapper[4707]: I1204 09:58:34.069607 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnksj\" (UniqueName: \"kubernetes.io/projected/4f8839cc-9d4a-4cba-ab65-b5413391e72f-kube-api-access-qnksj\") pod \"5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:34 crc kubenswrapper[4707]: I1204 09:58:34.151410 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:34 crc kubenswrapper[4707]: I1204 09:58:34.581146 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr"] Dec 04 09:58:35 crc kubenswrapper[4707]: I1204 09:58:35.556775 4707 generic.go:334] "Generic (PLEG): container finished" podID="4f8839cc-9d4a-4cba-ab65-b5413391e72f" containerID="1351f5bd558010df344c3764b52c56571a496675b2e831eda286471ac011eb44" exitCode=0 Dec 04 09:58:35 crc kubenswrapper[4707]: I1204 09:58:35.556868 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" event={"ID":"4f8839cc-9d4a-4cba-ab65-b5413391e72f","Type":"ContainerDied","Data":"1351f5bd558010df344c3764b52c56571a496675b2e831eda286471ac011eb44"} Dec 04 09:58:35 crc kubenswrapper[4707]: I1204 09:58:35.557134 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" event={"ID":"4f8839cc-9d4a-4cba-ab65-b5413391e72f","Type":"ContainerStarted","Data":"996163d1ccae284c5685eae04db319d93c77d119745e43e771fd1713dc356d61"} Dec 04 09:58:36 crc kubenswrapper[4707]: I1204 09:58:36.566803 4707 generic.go:334] "Generic (PLEG): container finished" podID="4f8839cc-9d4a-4cba-ab65-b5413391e72f" containerID="3bedb78b68b1a236ce1c7df4a602bc6bb68ffed2c74c8c1d67b523e01d1bffe6" exitCode=0 Dec 04 09:58:36 crc kubenswrapper[4707]: I1204 09:58:36.566888 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" event={"ID":"4f8839cc-9d4a-4cba-ab65-b5413391e72f","Type":"ContainerDied","Data":"3bedb78b68b1a236ce1c7df4a602bc6bb68ffed2c74c8c1d67b523e01d1bffe6"} Dec 04 09:58:37 crc kubenswrapper[4707]: I1204 09:58:37.574824 4707 generic.go:334] "Generic (PLEG): container finished" podID="4f8839cc-9d4a-4cba-ab65-b5413391e72f" containerID="1d66559e5982470757c7a470726c3b3b76628e9d8b86363d0da3dd475beb469a" exitCode=0 Dec 04 09:58:37 crc kubenswrapper[4707]: I1204 09:58:37.574896 4707 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" event={"ID":"4f8839cc-9d4a-4cba-ab65-b5413391e72f","Type":"ContainerDied","Data":"1d66559e5982470757c7a470726c3b3b76628e9d8b86363d0da3dd475beb469a"} Dec 04 09:58:38 crc kubenswrapper[4707]: I1204 09:58:38.855290 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.027805 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnksj\" (UniqueName: \"kubernetes.io/projected/4f8839cc-9d4a-4cba-ab65-b5413391e72f-kube-api-access-qnksj\") pod \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.028015 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-util\") pod \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.028058 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-bundle\") pod \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\" (UID: \"4f8839cc-9d4a-4cba-ab65-b5413391e72f\") " Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.029895 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-bundle" (OuterVolumeSpecName: "bundle") pod "4f8839cc-9d4a-4cba-ab65-b5413391e72f" (UID: "4f8839cc-9d4a-4cba-ab65-b5413391e72f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.035105 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f8839cc-9d4a-4cba-ab65-b5413391e72f-kube-api-access-qnksj" (OuterVolumeSpecName: "kube-api-access-qnksj") pod "4f8839cc-9d4a-4cba-ab65-b5413391e72f" (UID: "4f8839cc-9d4a-4cba-ab65-b5413391e72f"). InnerVolumeSpecName "kube-api-access-qnksj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.042567 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-util" (OuterVolumeSpecName: "util") pod "4f8839cc-9d4a-4cba-ab65-b5413391e72f" (UID: "4f8839cc-9d4a-4cba-ab65-b5413391e72f"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.129307 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnksj\" (UniqueName: \"kubernetes.io/projected/4f8839cc-9d4a-4cba-ab65-b5413391e72f-kube-api-access-qnksj\") on node \"crc\" DevicePath \"\"" Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.129365 4707 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-util\") on node \"crc\" DevicePath \"\"" Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.129381 4707 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4f8839cc-9d4a-4cba-ab65-b5413391e72f-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.593145 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" event={"ID":"4f8839cc-9d4a-4cba-ab65-b5413391e72f","Type":"ContainerDied","Data":"996163d1ccae284c5685eae04db319d93c77d119745e43e771fd1713dc356d61"} Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.593186 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr" Dec 04 09:58:39 crc kubenswrapper[4707]: I1204 09:58:39.593200 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="996163d1ccae284c5685eae04db319d93c77d119745e43e771fd1713dc356d61" Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.875842 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb"] Dec 04 09:58:48 crc kubenswrapper[4707]: E1204 09:58:48.876674 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f8839cc-9d4a-4cba-ab65-b5413391e72f" containerName="util" Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.876692 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f8839cc-9d4a-4cba-ab65-b5413391e72f" containerName="util" Dec 04 09:58:48 crc kubenswrapper[4707]: E1204 09:58:48.876712 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f8839cc-9d4a-4cba-ab65-b5413391e72f" containerName="extract" Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.876720 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f8839cc-9d4a-4cba-ab65-b5413391e72f" containerName="extract" Dec 04 09:58:48 crc kubenswrapper[4707]: E1204 09:58:48.876736 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f8839cc-9d4a-4cba-ab65-b5413391e72f" containerName="pull" Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.876743 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f8839cc-9d4a-4cba-ab65-b5413391e72f" containerName="pull" Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.876894 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f8839cc-9d4a-4cba-ab65-b5413391e72f" containerName="extract" Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.877302 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.879547 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-wmrrk" Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.879729 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-service-cert" Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.890017 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb"] Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.984942 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-webhook-cert\") pod \"manila-operator-controller-manager-7b6f6fdcbf-xmdtb\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.985020 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzq4j\" (UniqueName: \"kubernetes.io/projected/946cc19a-22ff-4a9b-862f-f9471d794bb0-kube-api-access-pzq4j\") pod \"manila-operator-controller-manager-7b6f6fdcbf-xmdtb\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:48 crc kubenswrapper[4707]: I1204 09:58:48.985060 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-apiservice-cert\") pod \"manila-operator-controller-manager-7b6f6fdcbf-xmdtb\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:49 crc kubenswrapper[4707]: I1204 09:58:49.086037 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-apiservice-cert\") pod \"manila-operator-controller-manager-7b6f6fdcbf-xmdtb\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:49 crc kubenswrapper[4707]: I1204 09:58:49.086448 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-webhook-cert\") pod \"manila-operator-controller-manager-7b6f6fdcbf-xmdtb\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:49 crc kubenswrapper[4707]: I1204 09:58:49.086597 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzq4j\" (UniqueName: \"kubernetes.io/projected/946cc19a-22ff-4a9b-862f-f9471d794bb0-kube-api-access-pzq4j\") pod \"manila-operator-controller-manager-7b6f6fdcbf-xmdtb\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:49 crc kubenswrapper[4707]: I1204 09:58:49.091742 4707 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-webhook-cert\") pod \"manila-operator-controller-manager-7b6f6fdcbf-xmdtb\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:49 crc kubenswrapper[4707]: I1204 09:58:49.092107 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-apiservice-cert\") pod \"manila-operator-controller-manager-7b6f6fdcbf-xmdtb\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:49 crc kubenswrapper[4707]: I1204 09:58:49.106742 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzq4j\" (UniqueName: \"kubernetes.io/projected/946cc19a-22ff-4a9b-862f-f9471d794bb0-kube-api-access-pzq4j\") pod \"manila-operator-controller-manager-7b6f6fdcbf-xmdtb\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:49 crc kubenswrapper[4707]: I1204 09:58:49.233849 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:49 crc kubenswrapper[4707]: I1204 09:58:49.689325 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb"] Dec 04 09:58:50 crc kubenswrapper[4707]: I1204 09:58:50.663384 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" event={"ID":"946cc19a-22ff-4a9b-862f-f9471d794bb0","Type":"ContainerStarted","Data":"e999c685535aa53b32f47ff0d28c9415d63f66cd236bee8876dac193d90cd72a"} Dec 04 09:58:53 crc kubenswrapper[4707]: I1204 09:58:53.694258 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" event={"ID":"946cc19a-22ff-4a9b-862f-f9471d794bb0","Type":"ContainerStarted","Data":"dea4af4e97d8032f50bc97a7c4eaa03a420e6f12c96395fc960168eb6b20a072"} Dec 04 09:58:53 crc kubenswrapper[4707]: I1204 09:58:53.694887 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:58:53 crc kubenswrapper[4707]: I1204 09:58:53.713362 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" podStartSLOduration=2.7003069650000002 podStartE2EDuration="5.71330478s" podCreationTimestamp="2025-12-04 09:58:48 +0000 UTC" firstStartedPulling="2025-12-04 09:58:49.703092838 +0000 UTC m=+1229.138915345" lastFinishedPulling="2025-12-04 09:58:52.716090653 +0000 UTC m=+1232.151913160" observedRunningTime="2025-12-04 09:58:53.710232123 +0000 UTC m=+1233.146054630" watchObservedRunningTime="2025-12-04 09:58:53.71330478 +0000 UTC m=+1233.149127287" Dec 04 09:58:59 crc kubenswrapper[4707]: I1204 09:58:59.239501 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 09:59:00 crc kubenswrapper[4707]: I1204 09:59:00.816927 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:59:00 crc kubenswrapper[4707]: I1204 09:59:00.817322 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.685736 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-create-hpsb5"] Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.686840 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-hpsb5" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.691786 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs"] Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.692703 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.694258 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-db-secret" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.700511 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-hpsb5"] Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.711289 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs"] Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.714583 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rzbj\" (UniqueName: \"kubernetes.io/projected/9fc71a77-6268-43a1-b4a2-322b2016e6bc-kube-api-access-2rzbj\") pod \"manila-9bcb-account-create-update-kqvrs\" (UID: \"9fc71a77-6268-43a1-b4a2-322b2016e6bc\") " pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.714849 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fc71a77-6268-43a1-b4a2-322b2016e6bc-operator-scripts\") pod \"manila-9bcb-account-create-update-kqvrs\" (UID: \"9fc71a77-6268-43a1-b4a2-322b2016e6bc\") " pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.714960 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmch2\" (UniqueName: \"kubernetes.io/projected/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-kube-api-access-mmch2\") pod \"manila-db-create-hpsb5\" (UID: \"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb\") " pod="manila-kuttl-tests/manila-db-create-hpsb5" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.715129 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-operator-scripts\") pod \"manila-db-create-hpsb5\" (UID: \"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb\") " 
pod="manila-kuttl-tests/manila-db-create-hpsb5" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.816030 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-operator-scripts\") pod \"manila-db-create-hpsb5\" (UID: \"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb\") " pod="manila-kuttl-tests/manila-db-create-hpsb5" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.816116 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fc71a77-6268-43a1-b4a2-322b2016e6bc-operator-scripts\") pod \"manila-9bcb-account-create-update-kqvrs\" (UID: \"9fc71a77-6268-43a1-b4a2-322b2016e6bc\") " pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.816145 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rzbj\" (UniqueName: \"kubernetes.io/projected/9fc71a77-6268-43a1-b4a2-322b2016e6bc-kube-api-access-2rzbj\") pod \"manila-9bcb-account-create-update-kqvrs\" (UID: \"9fc71a77-6268-43a1-b4a2-322b2016e6bc\") " pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.816168 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmch2\" (UniqueName: \"kubernetes.io/projected/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-kube-api-access-mmch2\") pod \"manila-db-create-hpsb5\" (UID: \"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb\") " pod="manila-kuttl-tests/manila-db-create-hpsb5" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.816829 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-operator-scripts\") pod \"manila-db-create-hpsb5\" (UID: \"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb\") " pod="manila-kuttl-tests/manila-db-create-hpsb5" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.817154 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fc71a77-6268-43a1-b4a2-322b2016e6bc-operator-scripts\") pod \"manila-9bcb-account-create-update-kqvrs\" (UID: \"9fc71a77-6268-43a1-b4a2-322b2016e6bc\") " pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.841393 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmch2\" (UniqueName: \"kubernetes.io/projected/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-kube-api-access-mmch2\") pod \"manila-db-create-hpsb5\" (UID: \"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb\") " pod="manila-kuttl-tests/manila-db-create-hpsb5" Dec 04 09:59:03 crc kubenswrapper[4707]: I1204 09:59:03.841393 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rzbj\" (UniqueName: \"kubernetes.io/projected/9fc71a77-6268-43a1-b4a2-322b2016e6bc-kube-api-access-2rzbj\") pod \"manila-9bcb-account-create-update-kqvrs\" (UID: \"9fc71a77-6268-43a1-b4a2-322b2016e6bc\") " pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" Dec 04 09:59:04 crc kubenswrapper[4707]: I1204 09:59:04.013582 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-create-hpsb5" Dec 04 09:59:04 crc kubenswrapper[4707]: I1204 09:59:04.021103 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" Dec 04 09:59:04 crc kubenswrapper[4707]: W1204 09:59:04.310496 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9fc71a77_6268_43a1_b4a2_322b2016e6bc.slice/crio-b0452c01c128fc2cc3a682a23ee192a5509f6f44b0b343a13233d23078b1677e WatchSource:0}: Error finding container b0452c01c128fc2cc3a682a23ee192a5509f6f44b0b343a13233d23078b1677e: Status 404 returned error can't find the container with id b0452c01c128fc2cc3a682a23ee192a5509f6f44b0b343a13233d23078b1677e Dec 04 09:59:04 crc kubenswrapper[4707]: I1204 09:59:04.311216 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs"] Dec 04 09:59:04 crc kubenswrapper[4707]: I1204 09:59:04.609450 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-hpsb5"] Dec 04 09:59:04 crc kubenswrapper[4707]: W1204 09:59:04.615570 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad9bc849_2e93_44b6_81de_5b72a9c1e0eb.slice/crio-92a4885439becc65cbc139530eb8d5e1b62c131bca44e48e5b9106a7e907bae4 WatchSource:0}: Error finding container 92a4885439becc65cbc139530eb8d5e1b62c131bca44e48e5b9106a7e907bae4: Status 404 returned error can't find the container with id 92a4885439becc65cbc139530eb8d5e1b62c131bca44e48e5b9106a7e907bae4 Dec 04 09:59:04 crc kubenswrapper[4707]: I1204 09:59:04.765604 4707 generic.go:334] "Generic (PLEG): container finished" podID="9fc71a77-6268-43a1-b4a2-322b2016e6bc" containerID="a005f97f50b5ac3055eae49dcb7c1d520b9756155f43a9fd53876fc06be20a58" exitCode=0 Dec 04 09:59:04 crc kubenswrapper[4707]: I1204 09:59:04.765689 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" event={"ID":"9fc71a77-6268-43a1-b4a2-322b2016e6bc","Type":"ContainerDied","Data":"a005f97f50b5ac3055eae49dcb7c1d520b9756155f43a9fd53876fc06be20a58"} Dec 04 09:59:04 crc kubenswrapper[4707]: I1204 09:59:04.765717 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" event={"ID":"9fc71a77-6268-43a1-b4a2-322b2016e6bc","Type":"ContainerStarted","Data":"b0452c01c128fc2cc3a682a23ee192a5509f6f44b0b343a13233d23078b1677e"} Dec 04 09:59:04 crc kubenswrapper[4707]: I1204 09:59:04.766811 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-hpsb5" event={"ID":"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb","Type":"ContainerStarted","Data":"92a4885439becc65cbc139530eb8d5e1b62c131bca44e48e5b9106a7e907bae4"} Dec 04 09:59:05 crc kubenswrapper[4707]: I1204 09:59:05.775196 4707 generic.go:334] "Generic (PLEG): container finished" podID="ad9bc849-2e93-44b6-81de-5b72a9c1e0eb" containerID="0c5be9822eadcf7e6cccc699a343a98de4285bff130efaca8d7271edac999ac2" exitCode=0 Dec 04 09:59:05 crc kubenswrapper[4707]: I1204 09:59:05.775289 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-hpsb5" event={"ID":"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb","Type":"ContainerDied","Data":"0c5be9822eadcf7e6cccc699a343a98de4285bff130efaca8d7271edac999ac2"} Dec 04 09:59:06 crc 
kubenswrapper[4707]: I1204 09:59:06.069788 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" Dec 04 09:59:06 crc kubenswrapper[4707]: I1204 09:59:06.171294 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rzbj\" (UniqueName: \"kubernetes.io/projected/9fc71a77-6268-43a1-b4a2-322b2016e6bc-kube-api-access-2rzbj\") pod \"9fc71a77-6268-43a1-b4a2-322b2016e6bc\" (UID: \"9fc71a77-6268-43a1-b4a2-322b2016e6bc\") " Dec 04 09:59:06 crc kubenswrapper[4707]: I1204 09:59:06.171470 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fc71a77-6268-43a1-b4a2-322b2016e6bc-operator-scripts\") pod \"9fc71a77-6268-43a1-b4a2-322b2016e6bc\" (UID: \"9fc71a77-6268-43a1-b4a2-322b2016e6bc\") " Dec 04 09:59:06 crc kubenswrapper[4707]: I1204 09:59:06.172749 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fc71a77-6268-43a1-b4a2-322b2016e6bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9fc71a77-6268-43a1-b4a2-322b2016e6bc" (UID: "9fc71a77-6268-43a1-b4a2-322b2016e6bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:59:06 crc kubenswrapper[4707]: I1204 09:59:06.178702 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fc71a77-6268-43a1-b4a2-322b2016e6bc-kube-api-access-2rzbj" (OuterVolumeSpecName: "kube-api-access-2rzbj") pod "9fc71a77-6268-43a1-b4a2-322b2016e6bc" (UID: "9fc71a77-6268-43a1-b4a2-322b2016e6bc"). InnerVolumeSpecName "kube-api-access-2rzbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:59:06 crc kubenswrapper[4707]: I1204 09:59:06.273275 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rzbj\" (UniqueName: \"kubernetes.io/projected/9fc71a77-6268-43a1-b4a2-322b2016e6bc-kube-api-access-2rzbj\") on node \"crc\" DevicePath \"\"" Dec 04 09:59:06 crc kubenswrapper[4707]: I1204 09:59:06.273330 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fc71a77-6268-43a1-b4a2-322b2016e6bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 09:59:06 crc kubenswrapper[4707]: I1204 09:59:06.783701 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" Dec 04 09:59:06 crc kubenswrapper[4707]: I1204 09:59:06.785631 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs" event={"ID":"9fc71a77-6268-43a1-b4a2-322b2016e6bc","Type":"ContainerDied","Data":"b0452c01c128fc2cc3a682a23ee192a5509f6f44b0b343a13233d23078b1677e"} Dec 04 09:59:06 crc kubenswrapper[4707]: I1204 09:59:06.785667 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0452c01c128fc2cc3a682a23ee192a5509f6f44b0b343a13233d23078b1677e" Dec 04 09:59:07 crc kubenswrapper[4707]: I1204 09:59:07.057002 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-create-hpsb5" Dec 04 09:59:07 crc kubenswrapper[4707]: I1204 09:59:07.084787 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-operator-scripts\") pod \"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb\" (UID: \"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb\") " Dec 04 09:59:07 crc kubenswrapper[4707]: I1204 09:59:07.084854 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmch2\" (UniqueName: \"kubernetes.io/projected/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-kube-api-access-mmch2\") pod \"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb\" (UID: \"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb\") " Dec 04 09:59:07 crc kubenswrapper[4707]: I1204 09:59:07.085249 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ad9bc849-2e93-44b6-81de-5b72a9c1e0eb" (UID: "ad9bc849-2e93-44b6-81de-5b72a9c1e0eb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 09:59:07 crc kubenswrapper[4707]: I1204 09:59:07.085349 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 09:59:07 crc kubenswrapper[4707]: I1204 09:59:07.089842 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-kube-api-access-mmch2" (OuterVolumeSpecName: "kube-api-access-mmch2") pod "ad9bc849-2e93-44b6-81de-5b72a9c1e0eb" (UID: "ad9bc849-2e93-44b6-81de-5b72a9c1e0eb"). InnerVolumeSpecName "kube-api-access-mmch2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:59:07 crc kubenswrapper[4707]: I1204 09:59:07.187145 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmch2\" (UniqueName: \"kubernetes.io/projected/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb-kube-api-access-mmch2\") on node \"crc\" DevicePath \"\"" Dec 04 09:59:07 crc kubenswrapper[4707]: I1204 09:59:07.790879 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-hpsb5" event={"ID":"ad9bc849-2e93-44b6-81de-5b72a9c1e0eb","Type":"ContainerDied","Data":"92a4885439becc65cbc139530eb8d5e1b62c131bca44e48e5b9106a7e907bae4"} Dec 04 09:59:07 crc kubenswrapper[4707]: I1204 09:59:07.791241 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92a4885439becc65cbc139530eb8d5e1b62c131bca44e48e5b9106a7e907bae4" Dec 04 09:59:07 crc kubenswrapper[4707]: I1204 09:59:07.790913 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-create-hpsb5" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.006167 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-sync-tp72z"] Dec 04 09:59:09 crc kubenswrapper[4707]: E1204 09:59:09.006521 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fc71a77-6268-43a1-b4a2-322b2016e6bc" containerName="mariadb-account-create-update" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.006536 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fc71a77-6268-43a1-b4a2-322b2016e6bc" containerName="mariadb-account-create-update" Dec 04 09:59:09 crc kubenswrapper[4707]: E1204 09:59:09.006550 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad9bc849-2e93-44b6-81de-5b72a9c1e0eb" containerName="mariadb-database-create" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.006559 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad9bc849-2e93-44b6-81de-5b72a9c1e0eb" containerName="mariadb-database-create" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.006699 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fc71a77-6268-43a1-b4a2-322b2016e6bc" containerName="mariadb-account-create-update" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.006726 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad9bc849-2e93-44b6-81de-5b72a9c1e0eb" containerName="mariadb-database-create" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.007230 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.009419 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-b6btg" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.018935 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-tp72z"] Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.021559 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.115512 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-job-config-data\") pod \"manila-db-sync-tp72z\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.115631 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-config-data\") pod \"manila-db-sync-tp72z\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.115693 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxcsg\" (UniqueName: \"kubernetes.io/projected/d79960f1-b80d-4f51-aabb-64ce739c03ce-kube-api-access-cxcsg\") pod \"manila-db-sync-tp72z\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.216498 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-config-data\") pod \"manila-db-sync-tp72z\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.216600 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxcsg\" (UniqueName: \"kubernetes.io/projected/d79960f1-b80d-4f51-aabb-64ce739c03ce-kube-api-access-cxcsg\") pod \"manila-db-sync-tp72z\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.216647 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-job-config-data\") pod \"manila-db-sync-tp72z\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.223361 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-job-config-data\") pod \"manila-db-sync-tp72z\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.223757 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-config-data\") pod \"manila-db-sync-tp72z\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.233214 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxcsg\" (UniqueName: \"kubernetes.io/projected/d79960f1-b80d-4f51-aabb-64ce739c03ce-kube-api-access-cxcsg\") pod \"manila-db-sync-tp72z\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.322982 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.732489 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-tp72z"] Dec 04 09:59:09 crc kubenswrapper[4707]: W1204 09:59:09.735881 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd79960f1_b80d_4f51_aabb_64ce739c03ce.slice/crio-24c636def1cd23314840a7da25208ce0b806b412fced5e1319a903d5fa3f88eb WatchSource:0}: Error finding container 24c636def1cd23314840a7da25208ce0b806b412fced5e1319a903d5fa3f88eb: Status 404 returned error can't find the container with id 24c636def1cd23314840a7da25208ce0b806b412fced5e1319a903d5fa3f88eb Dec 04 09:59:09 crc kubenswrapper[4707]: I1204 09:59:09.803466 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-tp72z" event={"ID":"d79960f1-b80d-4f51-aabb-64ce739c03ce","Type":"ContainerStarted","Data":"24c636def1cd23314840a7da25208ce0b806b412fced5e1319a903d5fa3f88eb"} Dec 04 09:59:15 crc kubenswrapper[4707]: I1204 09:59:15.875514 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-tp72z" event={"ID":"d79960f1-b80d-4f51-aabb-64ce739c03ce","Type":"ContainerStarted","Data":"71279b45abda23aed836834dff7a5b6c3a8b344427e097bd9e981c90b505a3d2"} Dec 04 09:59:15 crc kubenswrapper[4707]: I1204 09:59:15.893194 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-db-sync-tp72z" podStartSLOduration=2.56751779 podStartE2EDuration="7.893173232s" podCreationTimestamp="2025-12-04 09:59:08 +0000 UTC" firstStartedPulling="2025-12-04 09:59:09.738637501 +0000 UTC m=+1249.174460008" lastFinishedPulling="2025-12-04 09:59:15.064292943 +0000 UTC m=+1254.500115450" observedRunningTime="2025-12-04 09:59:15.893145051 +0000 UTC m=+1255.328967558" watchObservedRunningTime="2025-12-04 09:59:15.893173232 +0000 UTC m=+1255.328995739" Dec 04 09:59:30 crc kubenswrapper[4707]: I1204 09:59:30.817644 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 09:59:30 crc kubenswrapper[4707]: I1204 09:59:30.818194 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 09:59:30 crc kubenswrapper[4707]: I1204 09:59:30.818241 4707 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 09:59:30 crc kubenswrapper[4707]: I1204 09:59:30.819022 4707 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e65929a2443d875614d7254731d235fbb949ab647cb08abccd54722f9ef3c29b"} pod="openshift-machine-config-operator/machine-config-daemon-c244z" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 09:59:30 crc kubenswrapper[4707]: I1204 09:59:30.819079 4707 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" containerID="cri-o://e65929a2443d875614d7254731d235fbb949ab647cb08abccd54722f9ef3c29b" gracePeriod=600 Dec 04 09:59:30 crc kubenswrapper[4707]: I1204 09:59:30.987081 4707 generic.go:334] "Generic (PLEG): container finished" podID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerID="e65929a2443d875614d7254731d235fbb949ab647cb08abccd54722f9ef3c29b" exitCode=0 Dec 04 09:59:30 crc kubenswrapper[4707]: I1204 09:59:30.987167 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerDied","Data":"e65929a2443d875614d7254731d235fbb949ab647cb08abccd54722f9ef3c29b"} Dec 04 09:59:30 crc kubenswrapper[4707]: I1204 09:59:30.987487 4707 scope.go:117] "RemoveContainer" containerID="7beea26bd12c9b8a3dede9145f5d95cbd909ef83792c0ec9ae43b628c20e8918" Dec 04 09:59:31 crc kubenswrapper[4707]: I1204 09:59:31.995848 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"a40438c3e1376df9722e48d67ff0c6c89cb3a6cefb6a18fc0adad90335e07b60"} Dec 04 09:59:34 crc kubenswrapper[4707]: I1204 09:59:34.011972 4707 generic.go:334] "Generic (PLEG): container finished" podID="d79960f1-b80d-4f51-aabb-64ce739c03ce" containerID="71279b45abda23aed836834dff7a5b6c3a8b344427e097bd9e981c90b505a3d2" exitCode=0 Dec 04 09:59:34 crc kubenswrapper[4707]: I1204 09:59:34.012061 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-tp72z" event={"ID":"d79960f1-b80d-4f51-aabb-64ce739c03ce","Type":"ContainerDied","Data":"71279b45abda23aed836834dff7a5b6c3a8b344427e097bd9e981c90b505a3d2"} Dec 04 09:59:35 crc kubenswrapper[4707]: I1204 09:59:35.323103 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:35 crc kubenswrapper[4707]: I1204 09:59:35.412941 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxcsg\" (UniqueName: \"kubernetes.io/projected/d79960f1-b80d-4f51-aabb-64ce739c03ce-kube-api-access-cxcsg\") pod \"d79960f1-b80d-4f51-aabb-64ce739c03ce\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " Dec 04 09:59:35 crc kubenswrapper[4707]: I1204 09:59:35.413117 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-config-data\") pod \"d79960f1-b80d-4f51-aabb-64ce739c03ce\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " Dec 04 09:59:35 crc kubenswrapper[4707]: I1204 09:59:35.413151 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-job-config-data\") pod \"d79960f1-b80d-4f51-aabb-64ce739c03ce\" (UID: \"d79960f1-b80d-4f51-aabb-64ce739c03ce\") " Dec 04 09:59:35 crc kubenswrapper[4707]: I1204 09:59:35.423129 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "d79960f1-b80d-4f51-aabb-64ce739c03ce" (UID: "d79960f1-b80d-4f51-aabb-64ce739c03ce"). 
InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:59:35 crc kubenswrapper[4707]: I1204 09:59:35.423236 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d79960f1-b80d-4f51-aabb-64ce739c03ce-kube-api-access-cxcsg" (OuterVolumeSpecName: "kube-api-access-cxcsg") pod "d79960f1-b80d-4f51-aabb-64ce739c03ce" (UID: "d79960f1-b80d-4f51-aabb-64ce739c03ce"). InnerVolumeSpecName "kube-api-access-cxcsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 09:59:35 crc kubenswrapper[4707]: I1204 09:59:35.425190 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-config-data" (OuterVolumeSpecName: "config-data") pod "d79960f1-b80d-4f51-aabb-64ce739c03ce" (UID: "d79960f1-b80d-4f51-aabb-64ce739c03ce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 09:59:35 crc kubenswrapper[4707]: I1204 09:59:35.515145 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxcsg\" (UniqueName: \"kubernetes.io/projected/d79960f1-b80d-4f51-aabb-64ce739c03ce-kube-api-access-cxcsg\") on node \"crc\" DevicePath \"\"" Dec 04 09:59:35 crc kubenswrapper[4707]: I1204 09:59:35.515528 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 09:59:35 crc kubenswrapper[4707]: I1204 09:59:35.515544 4707 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d79960f1-b80d-4f51-aabb-64ce739c03ce-job-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.039035 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-tp72z" event={"ID":"d79960f1-b80d-4f51-aabb-64ce739c03ce","Type":"ContainerDied","Data":"24c636def1cd23314840a7da25208ce0b806b412fced5e1319a903d5fa3f88eb"} Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.039071 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24c636def1cd23314840a7da25208ce0b806b412fced5e1319a903d5fa3f88eb" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.039147 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-tp72z" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.319026 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 09:59:36 crc kubenswrapper[4707]: E1204 09:59:36.319422 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d79960f1-b80d-4f51-aabb-64ce739c03ce" containerName="manila-db-sync" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.319443 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="d79960f1-b80d-4f51-aabb-64ce739c03ce" containerName="manila-db-sync" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.319607 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="d79960f1-b80d-4f51-aabb-64ce739c03ce" containerName="manila-db-sync" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.320555 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.323045 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scheduler-config-data" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.323308 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-b6btg" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.325213 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.327599 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.329609 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.329693 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d7b1bef-7bf1-4789-8783-5552386590a5-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.329748 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-scripts\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.329790 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2md5\" (UniqueName: \"kubernetes.io/projected/8d7b1bef-7bf1-4789-8783-5552386590a5-kube-api-access-m2md5\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.329833 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.353096 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scripts" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.353862 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"ceph-conf-files" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.354220 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-share-share0-config-data" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.355930 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.380618 4707 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.388314 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.431727 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432165 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjtb2\" (UniqueName: \"kubernetes.io/projected/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-kube-api-access-pjtb2\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432221 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432251 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432296 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-scripts\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432320 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-ceph\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432407 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432464 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d7b1bef-7bf1-4789-8783-5552386590a5-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432505 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432545 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-scripts\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432625 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2md5\" (UniqueName: \"kubernetes.io/projected/8d7b1bef-7bf1-4789-8783-5552386590a5-kube-api-access-m2md5\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432662 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.432765 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d7b1bef-7bf1-4789-8783-5552386590a5-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.457295 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-scripts\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.457551 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.457630 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.470964 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2md5\" (UniqueName: \"kubernetes.io/projected/8d7b1bef-7bf1-4789-8783-5552386590a5-kube-api-access-m2md5\") pod \"manila-scheduler-0\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.491260 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.492602 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.499396 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-api-config-data" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.512404 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.533953 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-scripts\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534000 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-ceph\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534029 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-etc-machine-id\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534064 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4r6v\" (UniqueName: \"kubernetes.io/projected/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-kube-api-access-s4r6v\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534089 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534115 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data-custom\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534281 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534471 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534523 4707 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534609 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-logs\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534680 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjtb2\" (UniqueName: \"kubernetes.io/projected/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-kube-api-access-pjtb2\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534689 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534876 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-scripts\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.534929 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.535031 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.538888 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.539041 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-scripts\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.539532 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data\") pod 
\"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.539890 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-ceph\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.554068 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjtb2\" (UniqueName: \"kubernetes.io/projected/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-kube-api-access-pjtb2\") pod \"manila-share-share0-0\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.636772 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-scripts\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.636827 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-etc-machine-id\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.636853 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4r6v\" (UniqueName: \"kubernetes.io/projected/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-kube-api-access-s4r6v\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.636876 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data-custom\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.636924 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.636950 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-logs\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.637372 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-logs\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.637607 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-etc-machine-id\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.641168 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.642227 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data-custom\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.644828 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-scripts\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.656687 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4r6v\" (UniqueName: \"kubernetes.io/projected/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-kube-api-access-s4r6v\") pod \"manila-api-0\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.671026 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.680892 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:36 crc kubenswrapper[4707]: I1204 09:59:36.830203 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:37 crc kubenswrapper[4707]: I1204 09:59:37.154967 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 09:59:37 crc kubenswrapper[4707]: I1204 09:59:37.220196 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 09:59:37 crc kubenswrapper[4707]: W1204 09:59:37.224055 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7cc3e1c_5829_4a8e_9bfd_6829eaca788c.slice/crio-819e6df12d3505a8f9a20495e16708ae16621b3cd91ae37491f213606262d961 WatchSource:0}: Error finding container 819e6df12d3505a8f9a20495e16708ae16621b3cd91ae37491f213606262d961: Status 404 returned error can't find the container with id 819e6df12d3505a8f9a20495e16708ae16621b3cd91ae37491f213606262d961 Dec 04 09:59:37 crc kubenswrapper[4707]: I1204 09:59:37.339406 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 09:59:37 crc kubenswrapper[4707]: W1204 09:59:37.343923 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7facb196_5d86_4e0c_9fad_0fb8fd19cce2.slice/crio-0894c5bd81feb117d62c81b064d06460683eb5cb6e593c3f7c590800b7e08c8d WatchSource:0}: Error finding container 0894c5bd81feb117d62c81b064d06460683eb5cb6e593c3f7c590800b7e08c8d: Status 404 returned error can't find the container with id 0894c5bd81feb117d62c81b064d06460683eb5cb6e593c3f7c590800b7e08c8d Dec 04 09:59:38 crc kubenswrapper[4707]: I1204 09:59:38.053602 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"7facb196-5d86-4e0c-9fad-0fb8fd19cce2","Type":"ContainerStarted","Data":"c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c"} Dec 04 09:59:38 crc kubenswrapper[4707]: I1204 09:59:38.054194 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"7facb196-5d86-4e0c-9fad-0fb8fd19cce2","Type":"ContainerStarted","Data":"0894c5bd81feb117d62c81b064d06460683eb5cb6e593c3f7c590800b7e08c8d"} Dec 04 09:59:38 crc kubenswrapper[4707]: I1204 09:59:38.055822 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c","Type":"ContainerStarted","Data":"819e6df12d3505a8f9a20495e16708ae16621b3cd91ae37491f213606262d961"} Dec 04 09:59:38 crc kubenswrapper[4707]: I1204 09:59:38.057094 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8d7b1bef-7bf1-4789-8783-5552386590a5","Type":"ContainerStarted","Data":"735b67224f0c6817032980ca2db85730b5f5a83f59100468b33cc95448ff0dcb"} Dec 04 09:59:39 crc kubenswrapper[4707]: I1204 09:59:39.067432 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8d7b1bef-7bf1-4789-8783-5552386590a5","Type":"ContainerStarted","Data":"5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf"} Dec 04 09:59:39 crc kubenswrapper[4707]: I1204 09:59:39.073421 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"7facb196-5d86-4e0c-9fad-0fb8fd19cce2","Type":"ContainerStarted","Data":"7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd"} Dec 04 09:59:39 crc kubenswrapper[4707]: I1204 
09:59:39.073584 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:39 crc kubenswrapper[4707]: I1204 09:59:39.092835 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-api-0" podStartSLOduration=3.092819902 podStartE2EDuration="3.092819902s" podCreationTimestamp="2025-12-04 09:59:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 09:59:39.091231452 +0000 UTC m=+1278.527053969" watchObservedRunningTime="2025-12-04 09:59:39.092819902 +0000 UTC m=+1278.528642409" Dec 04 09:59:40 crc kubenswrapper[4707]: I1204 09:59:40.084176 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8d7b1bef-7bf1-4789-8783-5552386590a5","Type":"ContainerStarted","Data":"d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e"} Dec 04 09:59:40 crc kubenswrapper[4707]: I1204 09:59:40.112697 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-scheduler-0" podStartSLOduration=2.7352250849999997 podStartE2EDuration="4.112672673s" podCreationTimestamp="2025-12-04 09:59:36 +0000 UTC" firstStartedPulling="2025-12-04 09:59:37.159241307 +0000 UTC m=+1276.595063814" lastFinishedPulling="2025-12-04 09:59:38.536688895 +0000 UTC m=+1277.972511402" observedRunningTime="2025-12-04 09:59:40.106654093 +0000 UTC m=+1279.542476600" watchObservedRunningTime="2025-12-04 09:59:40.112672673 +0000 UTC m=+1279.548495190" Dec 04 09:59:45 crc kubenswrapper[4707]: I1204 09:59:45.125712 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c","Type":"ContainerStarted","Data":"70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e"} Dec 04 09:59:45 crc kubenswrapper[4707]: I1204 09:59:45.126290 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c","Type":"ContainerStarted","Data":"34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4"} Dec 04 09:59:45 crc kubenswrapper[4707]: I1204 09:59:45.162734 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-share-share0-0" podStartSLOduration=2.128242235 podStartE2EDuration="9.162708404s" podCreationTimestamp="2025-12-04 09:59:36 +0000 UTC" firstStartedPulling="2025-12-04 09:59:37.226121706 +0000 UTC m=+1276.661944203" lastFinishedPulling="2025-12-04 09:59:44.260587865 +0000 UTC m=+1283.696410372" observedRunningTime="2025-12-04 09:59:45.148662421 +0000 UTC m=+1284.584484958" watchObservedRunningTime="2025-12-04 09:59:45.162708404 +0000 UTC m=+1284.598530911" Dec 04 09:59:46 crc kubenswrapper[4707]: I1204 09:59:46.671365 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:46 crc kubenswrapper[4707]: I1204 09:59:46.681183 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 09:59:58 crc kubenswrapper[4707]: I1204 09:59:58.688006 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/manila-api-0" Dec 04 09:59:58 crc kubenswrapper[4707]: I1204 09:59:58.713672 4707 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 09:59:58 crc kubenswrapper[4707]: I1204 09:59:58.870020 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.134299 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9"] Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.135067 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.138111 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.138114 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.144653 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9"] Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.309802 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-secret-volume\") pod \"collect-profiles-29414040-4v4w9\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.309971 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75zjm\" (UniqueName: \"kubernetes.io/projected/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-kube-api-access-75zjm\") pod \"collect-profiles-29414040-4v4w9\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.310119 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-config-volume\") pod \"collect-profiles-29414040-4v4w9\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.411883 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-secret-volume\") pod \"collect-profiles-29414040-4v4w9\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.411940 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75zjm\" (UniqueName: \"kubernetes.io/projected/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-kube-api-access-75zjm\") pod \"collect-profiles-29414040-4v4w9\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.411962 4707 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-config-volume\") pod \"collect-profiles-29414040-4v4w9\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.413015 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-config-volume\") pod \"collect-profiles-29414040-4v4w9\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.420007 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-secret-volume\") pod \"collect-profiles-29414040-4v4w9\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.432235 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75zjm\" (UniqueName: \"kubernetes.io/projected/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-kube-api-access-75zjm\") pod \"collect-profiles-29414040-4v4w9\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:00 crc kubenswrapper[4707]: I1204 10:00:00.467930 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.368996 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9"] Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.690241 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-api-2"] Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.691383 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.696058 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-api-1"] Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.697314 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.706085 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-1"] Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.710120 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-2"] Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831551 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56b0d567-e030-4df4-ad01-64ac41f5dd84-logs\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831611 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f24w\" (UniqueName: \"kubernetes.io/projected/56b0d567-e030-4df4-ad01-64ac41f5dd84-kube-api-access-8f24w\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831650 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data-custom\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831685 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9df69222-abc8-4696-9f62-5c8680f65068-etc-machine-id\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831707 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h8tc\" (UniqueName: \"kubernetes.io/projected/9df69222-abc8-4696-9f62-5c8680f65068-kube-api-access-4h8tc\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831759 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-scripts\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831801 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831841 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56b0d567-e030-4df4-ad01-64ac41f5dd84-etc-machine-id\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831866 4707 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9df69222-abc8-4696-9f62-5c8680f65068-logs\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831906 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831928 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data-custom\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.831966 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-scripts\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.933465 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.933736 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data-custom\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.933774 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-scripts\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.933845 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56b0d567-e030-4df4-ad01-64ac41f5dd84-logs\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.933870 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f24w\" (UniqueName: \"kubernetes.io/projected/56b0d567-e030-4df4-ad01-64ac41f5dd84-kube-api-access-8f24w\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.933910 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data-custom\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " 
pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.933944 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9df69222-abc8-4696-9f62-5c8680f65068-etc-machine-id\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.933970 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h8tc\" (UniqueName: \"kubernetes.io/projected/9df69222-abc8-4696-9f62-5c8680f65068-kube-api-access-4h8tc\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.933997 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-scripts\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.934026 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.934043 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56b0d567-e030-4df4-ad01-64ac41f5dd84-etc-machine-id\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.934059 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9df69222-abc8-4696-9f62-5c8680f65068-logs\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.934388 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9df69222-abc8-4696-9f62-5c8680f65068-logs\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.934658 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56b0d567-e030-4df4-ad01-64ac41f5dd84-etc-machine-id\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.934694 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9df69222-abc8-4696-9f62-5c8680f65068-etc-machine-id\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.934955 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56b0d567-e030-4df4-ad01-64ac41f5dd84-logs\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") 
" pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.940620 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data-custom\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.940859 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-scripts\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.941202 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.942622 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-scripts\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.946945 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data-custom\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.950961 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.954621 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h8tc\" (UniqueName: \"kubernetes.io/projected/9df69222-abc8-4696-9f62-5c8680f65068-kube-api-access-4h8tc\") pod \"manila-api-1\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:01 crc kubenswrapper[4707]: I1204 10:00:01.955610 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f24w\" (UniqueName: \"kubernetes.io/projected/56b0d567-e030-4df4-ad01-64ac41f5dd84-kube-api-access-8f24w\") pod \"manila-api-2\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:02 crc kubenswrapper[4707]: I1204 10:00:02.006171 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:02 crc kubenswrapper[4707]: I1204 10:00:02.013385 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:02 crc kubenswrapper[4707]: I1204 10:00:02.252229 4707 generic.go:334] "Generic (PLEG): container finished" podID="afe621d1-ee66-45f5-a0da-d2473ecc5cf1" containerID="b49197bab8448af77f054c91ab427b72c5aa9937308e34ba38847b7e094129c4" exitCode=0 Dec 04 10:00:02 crc kubenswrapper[4707]: I1204 10:00:02.252273 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" event={"ID":"afe621d1-ee66-45f5-a0da-d2473ecc5cf1","Type":"ContainerDied","Data":"b49197bab8448af77f054c91ab427b72c5aa9937308e34ba38847b7e094129c4"} Dec 04 10:00:02 crc kubenswrapper[4707]: I1204 10:00:02.252303 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" event={"ID":"afe621d1-ee66-45f5-a0da-d2473ecc5cf1","Type":"ContainerStarted","Data":"9174204f45737a91591c275e25b71629c2b79a6055fab8c2256956db8215f306"} Dec 04 10:00:02 crc kubenswrapper[4707]: I1204 10:00:02.473677 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-1"] Dec 04 10:00:02 crc kubenswrapper[4707]: I1204 10:00:02.527779 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-2"] Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.264119 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"56b0d567-e030-4df4-ad01-64ac41f5dd84","Type":"ContainerStarted","Data":"20bdd824d2438813d28b97f722219d9f5c4b5d409ffeab3cad7a78d5dd70050f"} Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.264703 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"56b0d567-e030-4df4-ad01-64ac41f5dd84","Type":"ContainerStarted","Data":"06710237e20776fb327765744caa1ae292f4bd848796f716803c117ae3dba801"} Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.266769 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" event={"ID":"9df69222-abc8-4696-9f62-5c8680f65068","Type":"ContainerStarted","Data":"56d8bdaf47be2237a55bc43fd033fc94f334251a1704a688fdf19c5dc9158905"} Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.266803 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" event={"ID":"9df69222-abc8-4696-9f62-5c8680f65068","Type":"ContainerStarted","Data":"2e6ece3ad68a07e2c372ec229fc6c9b9a7a632a5612e79a3f91585b39351144e"} Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.546313 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.663195 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-config-volume\") pod \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.663558 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-secret-volume\") pod \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.663643 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75zjm\" (UniqueName: \"kubernetes.io/projected/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-kube-api-access-75zjm\") pod \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\" (UID: \"afe621d1-ee66-45f5-a0da-d2473ecc5cf1\") " Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.664219 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-config-volume" (OuterVolumeSpecName: "config-volume") pod "afe621d1-ee66-45f5-a0da-d2473ecc5cf1" (UID: "afe621d1-ee66-45f5-a0da-d2473ecc5cf1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.668960 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-kube-api-access-75zjm" (OuterVolumeSpecName: "kube-api-access-75zjm") pod "afe621d1-ee66-45f5-a0da-d2473ecc5cf1" (UID: "afe621d1-ee66-45f5-a0da-d2473ecc5cf1"). InnerVolumeSpecName "kube-api-access-75zjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.669584 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "afe621d1-ee66-45f5-a0da-d2473ecc5cf1" (UID: "afe621d1-ee66-45f5-a0da-d2473ecc5cf1"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.765136 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75zjm\" (UniqueName: \"kubernetes.io/projected/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-kube-api-access-75zjm\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.765176 4707 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:03 crc kubenswrapper[4707]: I1204 10:00:03.765185 4707 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afe621d1-ee66-45f5-a0da-d2473ecc5cf1-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:04 crc kubenswrapper[4707]: I1204 10:00:04.285948 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" event={"ID":"afe621d1-ee66-45f5-a0da-d2473ecc5cf1","Type":"ContainerDied","Data":"9174204f45737a91591c275e25b71629c2b79a6055fab8c2256956db8215f306"} Dec 04 10:00:04 crc kubenswrapper[4707]: I1204 10:00:04.285999 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9174204f45737a91591c275e25b71629c2b79a6055fab8c2256956db8215f306" Dec 04 10:00:04 crc kubenswrapper[4707]: I1204 10:00:04.286006 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414040-4v4w9" Dec 04 10:00:04 crc kubenswrapper[4707]: I1204 10:00:04.287829 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"56b0d567-e030-4df4-ad01-64ac41f5dd84","Type":"ContainerStarted","Data":"d4c409784211d165546964f6a5ddee2d20382c2530b46f8d8871952cb1f8e6f3"} Dec 04 10:00:04 crc kubenswrapper[4707]: I1204 10:00:04.288656 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:04 crc kubenswrapper[4707]: I1204 10:00:04.289937 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" event={"ID":"9df69222-abc8-4696-9f62-5c8680f65068","Type":"ContainerStarted","Data":"1179f1c009ff9a099cce2cba1bc009ec8a59df97bfe3ebb8258e681935e2ed00"} Dec 04 10:00:04 crc kubenswrapper[4707]: I1204 10:00:04.290296 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:04 crc kubenswrapper[4707]: I1204 10:00:04.318856 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-api-2" podStartSLOduration=3.318829081 podStartE2EDuration="3.318829081s" podCreationTimestamp="2025-12-04 10:00:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:00:04.30979702 +0000 UTC m=+1303.745619527" watchObservedRunningTime="2025-12-04 10:00:04.318829081 +0000 UTC m=+1303.754651588" Dec 04 10:00:04 crc kubenswrapper[4707]: I1204 10:00:04.331910 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-api-1" podStartSLOduration=3.331885657 podStartE2EDuration="3.331885657s" podCreationTimestamp="2025-12-04 10:00:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2025-12-04 10:00:04.326694996 +0000 UTC m=+1303.762517503" watchObservedRunningTime="2025-12-04 10:00:04.331885657 +0000 UTC m=+1303.767708164" Dec 04 10:00:24 crc kubenswrapper[4707]: I1204 10:00:24.140330 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:24 crc kubenswrapper[4707]: I1204 10:00:24.189710 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:24 crc kubenswrapper[4707]: I1204 10:00:24.946832 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-2"] Dec 04 10:00:24 crc kubenswrapper[4707]: I1204 10:00:24.947712 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-2" podUID="56b0d567-e030-4df4-ad01-64ac41f5dd84" containerName="manila-api-log" containerID="cri-o://20bdd824d2438813d28b97f722219d9f5c4b5d409ffeab3cad7a78d5dd70050f" gracePeriod=30 Dec 04 10:00:24 crc kubenswrapper[4707]: I1204 10:00:24.947794 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-2" podUID="56b0d567-e030-4df4-ad01-64ac41f5dd84" containerName="manila-api" containerID="cri-o://d4c409784211d165546964f6a5ddee2d20382c2530b46f8d8871952cb1f8e6f3" gracePeriod=30 Dec 04 10:00:24 crc kubenswrapper[4707]: I1204 10:00:24.962804 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-1"] Dec 04 10:00:24 crc kubenswrapper[4707]: I1204 10:00:24.963048 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-1" podUID="9df69222-abc8-4696-9f62-5c8680f65068" containerName="manila-api-log" containerID="cri-o://56d8bdaf47be2237a55bc43fd033fc94f334251a1704a688fdf19c5dc9158905" gracePeriod=30 Dec 04 10:00:24 crc kubenswrapper[4707]: I1204 10:00:24.963493 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-1" podUID="9df69222-abc8-4696-9f62-5c8680f65068" containerName="manila-api" containerID="cri-o://1179f1c009ff9a099cce2cba1bc009ec8a59df97bfe3ebb8258e681935e2ed00" gracePeriod=30 Dec 04 10:00:25 crc kubenswrapper[4707]: I1204 10:00:25.621947 4707 generic.go:334] "Generic (PLEG): container finished" podID="56b0d567-e030-4df4-ad01-64ac41f5dd84" containerID="20bdd824d2438813d28b97f722219d9f5c4b5d409ffeab3cad7a78d5dd70050f" exitCode=143 Dec 04 10:00:25 crc kubenswrapper[4707]: I1204 10:00:25.622029 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"56b0d567-e030-4df4-ad01-64ac41f5dd84","Type":"ContainerDied","Data":"20bdd824d2438813d28b97f722219d9f5c4b5d409ffeab3cad7a78d5dd70050f"} Dec 04 10:00:25 crc kubenswrapper[4707]: I1204 10:00:25.625664 4707 generic.go:334] "Generic (PLEG): container finished" podID="9df69222-abc8-4696-9f62-5c8680f65068" containerID="56d8bdaf47be2237a55bc43fd033fc94f334251a1704a688fdf19c5dc9158905" exitCode=143 Dec 04 10:00:25 crc kubenswrapper[4707]: I1204 10:00:25.625692 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" event={"ID":"9df69222-abc8-4696-9f62-5c8680f65068","Type":"ContainerDied","Data":"56d8bdaf47be2237a55bc43fd033fc94f334251a1704a688fdf19c5dc9158905"} Dec 04 10:00:28 crc kubenswrapper[4707]: I1204 10:00:28.647978 4707 generic.go:334] "Generic (PLEG): container finished" podID="56b0d567-e030-4df4-ad01-64ac41f5dd84" 
containerID="d4c409784211d165546964f6a5ddee2d20382c2530b46f8d8871952cb1f8e6f3" exitCode=0 Dec 04 10:00:28 crc kubenswrapper[4707]: I1204 10:00:28.648281 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"56b0d567-e030-4df4-ad01-64ac41f5dd84","Type":"ContainerDied","Data":"d4c409784211d165546964f6a5ddee2d20382c2530b46f8d8871952cb1f8e6f3"} Dec 04 10:00:28 crc kubenswrapper[4707]: I1204 10:00:28.650972 4707 generic.go:334] "Generic (PLEG): container finished" podID="9df69222-abc8-4696-9f62-5c8680f65068" containerID="1179f1c009ff9a099cce2cba1bc009ec8a59df97bfe3ebb8258e681935e2ed00" exitCode=0 Dec 04 10:00:28 crc kubenswrapper[4707]: I1204 10:00:28.651016 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" event={"ID":"9df69222-abc8-4696-9f62-5c8680f65068","Type":"ContainerDied","Data":"1179f1c009ff9a099cce2cba1bc009ec8a59df97bfe3ebb8258e681935e2ed00"} Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.237311 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.243723 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314264 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-scripts\") pod \"56b0d567-e030-4df4-ad01-64ac41f5dd84\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314348 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9df69222-abc8-4696-9f62-5c8680f65068-etc-machine-id\") pod \"9df69222-abc8-4696-9f62-5c8680f65068\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314373 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9df69222-abc8-4696-9f62-5c8680f65068-logs\") pod \"9df69222-abc8-4696-9f62-5c8680f65068\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314397 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data\") pod \"9df69222-abc8-4696-9f62-5c8680f65068\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314439 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data\") pod \"56b0d567-e030-4df4-ad01-64ac41f5dd84\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314473 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data-custom\") pod \"56b0d567-e030-4df4-ad01-64ac41f5dd84\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314516 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-scripts\") pod \"9df69222-abc8-4696-9f62-5c8680f65068\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314548 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56b0d567-e030-4df4-ad01-64ac41f5dd84-logs\") pod \"56b0d567-e030-4df4-ad01-64ac41f5dd84\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314562 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data-custom\") pod \"9df69222-abc8-4696-9f62-5c8680f65068\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314578 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56b0d567-e030-4df4-ad01-64ac41f5dd84-etc-machine-id\") pod \"56b0d567-e030-4df4-ad01-64ac41f5dd84\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314633 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h8tc\" (UniqueName: \"kubernetes.io/projected/9df69222-abc8-4696-9f62-5c8680f65068-kube-api-access-4h8tc\") pod \"9df69222-abc8-4696-9f62-5c8680f65068\" (UID: \"9df69222-abc8-4696-9f62-5c8680f65068\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.314679 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8f24w\" (UniqueName: \"kubernetes.io/projected/56b0d567-e030-4df4-ad01-64ac41f5dd84-kube-api-access-8f24w\") pod \"56b0d567-e030-4df4-ad01-64ac41f5dd84\" (UID: \"56b0d567-e030-4df4-ad01-64ac41f5dd84\") " Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.316059 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9df69222-abc8-4696-9f62-5c8680f65068-logs" (OuterVolumeSpecName: "logs") pod "9df69222-abc8-4696-9f62-5c8680f65068" (UID: "9df69222-abc8-4696-9f62-5c8680f65068"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.316545 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9df69222-abc8-4696-9f62-5c8680f65068-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9df69222-abc8-4696-9f62-5c8680f65068" (UID: "9df69222-abc8-4696-9f62-5c8680f65068"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.316640 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/56b0d567-e030-4df4-ad01-64ac41f5dd84-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "56b0d567-e030-4df4-ad01-64ac41f5dd84" (UID: "56b0d567-e030-4df4-ad01-64ac41f5dd84"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.316890 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56b0d567-e030-4df4-ad01-64ac41f5dd84-logs" (OuterVolumeSpecName: "logs") pod "56b0d567-e030-4df4-ad01-64ac41f5dd84" (UID: "56b0d567-e030-4df4-ad01-64ac41f5dd84"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.321655 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56b0d567-e030-4df4-ad01-64ac41f5dd84-kube-api-access-8f24w" (OuterVolumeSpecName: "kube-api-access-8f24w") pod "56b0d567-e030-4df4-ad01-64ac41f5dd84" (UID: "56b0d567-e030-4df4-ad01-64ac41f5dd84"). InnerVolumeSpecName "kube-api-access-8f24w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.321719 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9df69222-abc8-4696-9f62-5c8680f65068" (UID: "9df69222-abc8-4696-9f62-5c8680f65068"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.321924 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-scripts" (OuterVolumeSpecName: "scripts") pod "56b0d567-e030-4df4-ad01-64ac41f5dd84" (UID: "56b0d567-e030-4df4-ad01-64ac41f5dd84"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.323396 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-scripts" (OuterVolumeSpecName: "scripts") pod "9df69222-abc8-4696-9f62-5c8680f65068" (UID: "9df69222-abc8-4696-9f62-5c8680f65068"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.323691 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "56b0d567-e030-4df4-ad01-64ac41f5dd84" (UID: "56b0d567-e030-4df4-ad01-64ac41f5dd84"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.336710 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9df69222-abc8-4696-9f62-5c8680f65068-kube-api-access-4h8tc" (OuterVolumeSpecName: "kube-api-access-4h8tc") pod "9df69222-abc8-4696-9f62-5c8680f65068" (UID: "9df69222-abc8-4696-9f62-5c8680f65068"). InnerVolumeSpecName "kube-api-access-4h8tc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.354256 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data" (OuterVolumeSpecName: "config-data") pod "56b0d567-e030-4df4-ad01-64ac41f5dd84" (UID: "56b0d567-e030-4df4-ad01-64ac41f5dd84"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.354849 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data" (OuterVolumeSpecName: "config-data") pod "9df69222-abc8-4696-9f62-5c8680f65068" (UID: "9df69222-abc8-4696-9f62-5c8680f65068"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416122 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416163 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416172 4707 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56b0d567-e030-4df4-ad01-64ac41f5dd84-logs\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416180 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416190 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56b0d567-e030-4df4-ad01-64ac41f5dd84-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416200 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h8tc\" (UniqueName: \"kubernetes.io/projected/9df69222-abc8-4696-9f62-5c8680f65068-kube-api-access-4h8tc\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416211 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8f24w\" (UniqueName: \"kubernetes.io/projected/56b0d567-e030-4df4-ad01-64ac41f5dd84-kube-api-access-8f24w\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416219 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416227 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9df69222-abc8-4696-9f62-5c8680f65068-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416234 4707 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9df69222-abc8-4696-9f62-5c8680f65068-logs\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416243 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9df69222-abc8-4696-9f62-5c8680f65068-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.416251 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/56b0d567-e030-4df4-ad01-64ac41f5dd84-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.660927 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" event={"ID":"9df69222-abc8-4696-9f62-5c8680f65068","Type":"ContainerDied","Data":"2e6ece3ad68a07e2c372ec229fc6c9b9a7a632a5612e79a3f91585b39351144e"} Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.660957 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-1" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.661283 4707 scope.go:117] "RemoveContainer" containerID="1179f1c009ff9a099cce2cba1bc009ec8a59df97bfe3ebb8258e681935e2ed00" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.666604 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"56b0d567-e030-4df4-ad01-64ac41f5dd84","Type":"ContainerDied","Data":"06710237e20776fb327765744caa1ae292f4bd848796f716803c117ae3dba801"} Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.666653 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-2" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.690651 4707 scope.go:117] "RemoveContainer" containerID="56d8bdaf47be2237a55bc43fd033fc94f334251a1704a688fdf19c5dc9158905" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.703704 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-1"] Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.721397 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-api-1"] Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.726453 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-2"] Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.731629 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-api-2"] Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.733780 4707 scope.go:117] "RemoveContainer" containerID="d4c409784211d165546964f6a5ddee2d20382c2530b46f8d8871952cb1f8e6f3" Dec 04 10:00:29 crc kubenswrapper[4707]: I1204 10:00:29.753569 4707 scope.go:117] "RemoveContainer" containerID="20bdd824d2438813d28b97f722219d9f5c4b5d409ffeab3cad7a78d5dd70050f" Dec 04 10:00:30 crc kubenswrapper[4707]: I1204 10:00:30.852642 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56b0d567-e030-4df4-ad01-64ac41f5dd84" path="/var/lib/kubelet/pods/56b0d567-e030-4df4-ad01-64ac41f5dd84/volumes" Dec 04 10:00:30 crc kubenswrapper[4707]: I1204 10:00:30.853580 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9df69222-abc8-4696-9f62-5c8680f65068" path="/var/lib/kubelet/pods/9df69222-abc8-4696-9f62-5c8680f65068/volumes" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.053645 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Dec 04 10:00:31 crc kubenswrapper[4707]: E1204 10:00:31.054049 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9df69222-abc8-4696-9f62-5c8680f65068" containerName="manila-api" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.054075 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="9df69222-abc8-4696-9f62-5c8680f65068" containerName="manila-api" Dec 04 10:00:31 crc kubenswrapper[4707]: E1204 10:00:31.054089 4707 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe621d1-ee66-45f5-a0da-d2473ecc5cf1" containerName="collect-profiles" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.054100 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe621d1-ee66-45f5-a0da-d2473ecc5cf1" containerName="collect-profiles" Dec 04 10:00:31 crc kubenswrapper[4707]: E1204 10:00:31.054113 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56b0d567-e030-4df4-ad01-64ac41f5dd84" containerName="manila-api-log" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.054122 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="56b0d567-e030-4df4-ad01-64ac41f5dd84" containerName="manila-api-log" Dec 04 10:00:31 crc kubenswrapper[4707]: E1204 10:00:31.054143 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56b0d567-e030-4df4-ad01-64ac41f5dd84" containerName="manila-api" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.054152 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="56b0d567-e030-4df4-ad01-64ac41f5dd84" containerName="manila-api" Dec 04 10:00:31 crc kubenswrapper[4707]: E1204 10:00:31.054174 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9df69222-abc8-4696-9f62-5c8680f65068" containerName="manila-api-log" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.054181 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="9df69222-abc8-4696-9f62-5c8680f65068" containerName="manila-api-log" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.054323 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="56b0d567-e030-4df4-ad01-64ac41f5dd84" containerName="manila-api-log" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.054371 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="9df69222-abc8-4696-9f62-5c8680f65068" containerName="manila-api-log" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.054386 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="afe621d1-ee66-45f5-a0da-d2473ecc5cf1" containerName="collect-profiles" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.054399 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="9df69222-abc8-4696-9f62-5c8680f65068" containerName="manila-api" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.054411 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="56b0d567-e030-4df4-ad01-64ac41f5dd84" containerName="manila-api" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.055271 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.062356 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.150276 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data-custom\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.150673 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.150697 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c712367d-523c-41a8-9b9f-d9644cda0c26-etc-machine-id\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.150721 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-scripts\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.150790 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq9t4\" (UniqueName: \"kubernetes.io/projected/c712367d-523c-41a8-9b9f-d9644cda0c26-kube-api-access-nq9t4\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.252825 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data-custom\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.252910 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.252931 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c712367d-523c-41a8-9b9f-d9644cda0c26-etc-machine-id\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.252953 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-scripts\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.253002 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq9t4\" (UniqueName: \"kubernetes.io/projected/c712367d-523c-41a8-9b9f-d9644cda0c26-kube-api-access-nq9t4\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.253117 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c712367d-523c-41a8-9b9f-d9644cda0c26-etc-machine-id\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.257488 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-scripts\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.257553 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data-custom\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.258619 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.276999 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq9t4\" (UniqueName: \"kubernetes.io/projected/c712367d-523c-41a8-9b9f-d9644cda0c26-kube-api-access-nq9t4\") pod \"manila-scheduler-1\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.372729 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.599831 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Dec 04 10:00:31 crc kubenswrapper[4707]: I1204 10:00:31.686273 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"c712367d-523c-41a8-9b9f-d9644cda0c26","Type":"ContainerStarted","Data":"60afd4d61bd83b32fb00210ab034f16ba2f927daa5488b91a3609a9143f16d23"} Dec 04 10:00:32 crc kubenswrapper[4707]: I1204 10:00:32.697084 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"c712367d-523c-41a8-9b9f-d9644cda0c26","Type":"ContainerStarted","Data":"43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1"} Dec 04 10:00:32 crc kubenswrapper[4707]: I1204 10:00:32.697670 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"c712367d-523c-41a8-9b9f-d9644cda0c26","Type":"ContainerStarted","Data":"2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761"} Dec 04 10:00:32 crc kubenswrapper[4707]: I1204 10:00:32.735989 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-scheduler-1" podStartSLOduration=1.735967489 podStartE2EDuration="1.735967489s" podCreationTimestamp="2025-12-04 10:00:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:00:32.72991311 +0000 UTC m=+1332.165735617" watchObservedRunningTime="2025-12-04 10:00:32.735967489 +0000 UTC m=+1332.171789996" Dec 04 10:00:41 crc kubenswrapper[4707]: I1204 10:00:41.373405 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.230681 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.300719 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.301748 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.315447 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.441296 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data-custom\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.441413 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-etc-machine-id\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.441454 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.441491 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-scripts\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.441540 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvv8k\" (UniqueName: \"kubernetes.io/projected/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-kube-api-access-gvv8k\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.542551 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data-custom\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.542920 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-etc-machine-id\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.542950 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.542979 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-scripts\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.543019 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvv8k\" (UniqueName: \"kubernetes.io/projected/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-kube-api-access-gvv8k\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.543042 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-etc-machine-id\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.550915 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.551515 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-scripts\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.551604 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data-custom\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.566854 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvv8k\" (UniqueName: \"kubernetes.io/projected/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-kube-api-access-gvv8k\") pod \"manila-scheduler-2\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:43 crc kubenswrapper[4707]: I1204 10:00:43.624614 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:00:44 crc kubenswrapper[4707]: I1204 10:00:44.046792 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Dec 04 10:00:44 crc kubenswrapper[4707]: I1204 10:00:44.803775 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db","Type":"ContainerStarted","Data":"27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a"} Dec 04 10:00:44 crc kubenswrapper[4707]: I1204 10:00:44.804161 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db","Type":"ContainerStarted","Data":"2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b"} Dec 04 10:00:44 crc kubenswrapper[4707]: I1204 10:00:44.804181 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db","Type":"ContainerStarted","Data":"a75946273b9a8bfe84421bd5afe23459f435b1ca3526e8bbf059d833b3971257"} Dec 04 10:00:44 crc kubenswrapper[4707]: I1204 10:00:44.833382 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-scheduler-2" podStartSLOduration=1.833360561 podStartE2EDuration="1.833360561s" podCreationTimestamp="2025-12-04 10:00:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:00:44.824824076 +0000 UTC m=+1344.260646583" watchObservedRunningTime="2025-12-04 10:00:44.833360561 +0000 UTC m=+1344.269183088" Dec 04 10:00:53 crc kubenswrapper[4707]: I1204 10:00:53.625783 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.140105 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone-cron-29414041-7mcts"] Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.141714 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.151095 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-cron-29414041-7mcts"] Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.326551 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-config-data\") pod \"keystone-cron-29414041-7mcts\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.326717 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwk97\" (UniqueName: \"kubernetes.io/projected/452395d2-196f-415a-9d87-47e73b329310-kube-api-access-dwk97\") pod \"keystone-cron-29414041-7mcts\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.326760 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-fernet-keys\") pod \"keystone-cron-29414041-7mcts\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.427864 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-config-data\") pod \"keystone-cron-29414041-7mcts\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.427954 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwk97\" (UniqueName: \"kubernetes.io/projected/452395d2-196f-415a-9d87-47e73b329310-kube-api-access-dwk97\") pod \"keystone-cron-29414041-7mcts\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.428009 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-fernet-keys\") pod \"keystone-cron-29414041-7mcts\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.436968 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-config-data\") pod \"keystone-cron-29414041-7mcts\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.439627 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-fernet-keys\") pod \"keystone-cron-29414041-7mcts\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.448443 4707 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dwk97\" (UniqueName: \"kubernetes.io/projected/452395d2-196f-415a-9d87-47e73b329310-kube-api-access-dwk97\") pod \"keystone-cron-29414041-7mcts\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.465175 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.889750 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-cron-29414041-7mcts"] Dec 04 10:01:00 crc kubenswrapper[4707]: I1204 10:01:00.936140 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" event={"ID":"452395d2-196f-415a-9d87-47e73b329310","Type":"ContainerStarted","Data":"f066102a3aa706701b248bbd21a7418ceff1307e7ea92bcfb02edc71bb5210bf"} Dec 04 10:01:01 crc kubenswrapper[4707]: I1204 10:01:01.944392 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" event={"ID":"452395d2-196f-415a-9d87-47e73b329310","Type":"ContainerStarted","Data":"f54c3cee1446aca41c1854835c53c1fd4596dcc8cc3bb39524ad221554491d40"} Dec 04 10:01:01 crc kubenswrapper[4707]: I1204 10:01:01.968996 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" podStartSLOduration=1.9689729790000001 podStartE2EDuration="1.968972979s" podCreationTimestamp="2025-12-04 10:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:01:01.9622312 +0000 UTC m=+1361.398053707" watchObservedRunningTime="2025-12-04 10:01:01.968972979 +0000 UTC m=+1361.404795486" Dec 04 10:01:03 crc kubenswrapper[4707]: I1204 10:01:03.960131 4707 generic.go:334] "Generic (PLEG): container finished" podID="452395d2-196f-415a-9d87-47e73b329310" containerID="f54c3cee1446aca41c1854835c53c1fd4596dcc8cc3bb39524ad221554491d40" exitCode=0 Dec 04 10:01:03 crc kubenswrapper[4707]: I1204 10:01:03.960582 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" event={"ID":"452395d2-196f-415a-9d87-47e73b329310","Type":"ContainerDied","Data":"f54c3cee1446aca41c1854835c53c1fd4596dcc8cc3bb39524ad221554491d40"} Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.337849 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.400586 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.533452 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-fernet-keys\") pod \"452395d2-196f-415a-9d87-47e73b329310\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.533621 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-config-data\") pod \"452395d2-196f-415a-9d87-47e73b329310\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.533702 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwk97\" (UniqueName: \"kubernetes.io/projected/452395d2-196f-415a-9d87-47e73b329310-kube-api-access-dwk97\") pod \"452395d2-196f-415a-9d87-47e73b329310\" (UID: \"452395d2-196f-415a-9d87-47e73b329310\") " Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.539322 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "452395d2-196f-415a-9d87-47e73b329310" (UID: "452395d2-196f-415a-9d87-47e73b329310"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.539492 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/452395d2-196f-415a-9d87-47e73b329310-kube-api-access-dwk97" (OuterVolumeSpecName: "kube-api-access-dwk97") pod "452395d2-196f-415a-9d87-47e73b329310" (UID: "452395d2-196f-415a-9d87-47e73b329310"). InnerVolumeSpecName "kube-api-access-dwk97". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.567626 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-config-data" (OuterVolumeSpecName: "config-data") pod "452395d2-196f-415a-9d87-47e73b329310" (UID: "452395d2-196f-415a-9d87-47e73b329310"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.634963 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwk97\" (UniqueName: \"kubernetes.io/projected/452395d2-196f-415a-9d87-47e73b329310-kube-api-access-dwk97\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.635001 4707 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.635013 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/452395d2-196f-415a-9d87-47e73b329310-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.975744 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" event={"ID":"452395d2-196f-415a-9d87-47e73b329310","Type":"ContainerDied","Data":"f066102a3aa706701b248bbd21a7418ceff1307e7ea92bcfb02edc71bb5210bf"} Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.975794 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f066102a3aa706701b248bbd21a7418ceff1307e7ea92bcfb02edc71bb5210bf" Dec 04 10:01:05 crc kubenswrapper[4707]: I1204 10:01:05.975806 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-cron-29414041-7mcts" Dec 04 10:01:07 crc kubenswrapper[4707]: I1204 10:01:07.981417 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-sync-tp72z"] Dec 04 10:01:07 crc kubenswrapper[4707]: I1204 10:01:07.989738 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-sync-tp72z"] Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.011164 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.011530 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="8d7b1bef-7bf1-4789-8783-5552386590a5" containerName="manila-scheduler" containerID="cri-o://5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf" gracePeriod=30 Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.011622 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="8d7b1bef-7bf1-4789-8783-5552386590a5" containerName="probe" containerID="cri-o://d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e" gracePeriod=30 Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.026283 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.026647 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-2" podUID="f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" containerName="manila-scheduler" containerID="cri-o://2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b" gracePeriod=30 Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.026824 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-2" podUID="f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" 
containerName="probe" containerID="cri-o://27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a" gracePeriod=30 Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.036094 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.036325 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-1" podUID="c712367d-523c-41a8-9b9f-d9644cda0c26" containerName="manila-scheduler" containerID="cri-o://2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761" gracePeriod=30 Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.036628 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-1" podUID="c712367d-523c-41a8-9b9f-d9644cda0c26" containerName="probe" containerID="cri-o://43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1" gracePeriod=30 Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.042136 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.042397 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" containerName="manila-share" containerID="cri-o://34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4" gracePeriod=30 Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.042532 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" containerName="probe" containerID="cri-o://70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e" gracePeriod=30 Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.068441 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.068939 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="7facb196-5d86-4e0c-9fad-0fb8fd19cce2" containerName="manila-api-log" containerID="cri-o://c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c" gracePeriod=30 Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.069378 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="7facb196-5d86-4e0c-9fad-0fb8fd19cce2" containerName="manila-api" containerID="cri-o://7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd" gracePeriod=30 Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.083800 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila9bcb-account-delete-nlccv"] Dec 04 10:01:08 crc kubenswrapper[4707]: E1204 10:01:08.084114 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="452395d2-196f-415a-9d87-47e73b329310" containerName="keystone-cron" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.084129 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="452395d2-196f-415a-9d87-47e73b329310" containerName="keystone-cron" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.084295 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="452395d2-196f-415a-9d87-47e73b329310" containerName="keystone-cron" Dec 04 10:01:08 crc 
kubenswrapper[4707]: I1204 10:01:08.085006 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.095963 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila9bcb-account-delete-nlccv"] Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.271754 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/748f7b8f-ec6a-4a9d-a3c6-41a838028863-operator-scripts\") pod \"manila9bcb-account-delete-nlccv\" (UID: \"748f7b8f-ec6a-4a9d-a3c6-41a838028863\") " pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.271817 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-897wc\" (UniqueName: \"kubernetes.io/projected/748f7b8f-ec6a-4a9d-a3c6-41a838028863-kube-api-access-897wc\") pod \"manila9bcb-account-delete-nlccv\" (UID: \"748f7b8f-ec6a-4a9d-a3c6-41a838028863\") " pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.373497 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/748f7b8f-ec6a-4a9d-a3c6-41a838028863-operator-scripts\") pod \"manila9bcb-account-delete-nlccv\" (UID: \"748f7b8f-ec6a-4a9d-a3c6-41a838028863\") " pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.373549 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-897wc\" (UniqueName: \"kubernetes.io/projected/748f7b8f-ec6a-4a9d-a3c6-41a838028863-kube-api-access-897wc\") pod \"manila9bcb-account-delete-nlccv\" (UID: \"748f7b8f-ec6a-4a9d-a3c6-41a838028863\") " pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.374670 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/748f7b8f-ec6a-4a9d-a3c6-41a838028863-operator-scripts\") pod \"manila9bcb-account-delete-nlccv\" (UID: \"748f7b8f-ec6a-4a9d-a3c6-41a838028863\") " pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.394085 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-897wc\" (UniqueName: \"kubernetes.io/projected/748f7b8f-ec6a-4a9d-a3c6-41a838028863-kube-api-access-897wc\") pod \"manila9bcb-account-delete-nlccv\" (UID: \"748f7b8f-ec6a-4a9d-a3c6-41a838028863\") " pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.416585 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.714936 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila9bcb-account-delete-nlccv"] Dec 04 10:01:08 crc kubenswrapper[4707]: E1204 10:01:08.810763 4707 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7cc3e1c_5829_4a8e_9bfd_6829eaca788c.slice/crio-conmon-34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4.scope\": RecentStats: unable to find data in memory cache]" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.859741 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d79960f1-b80d-4f51-aabb-64ce739c03ce" path="/var/lib/kubelet/pods/d79960f1-b80d-4f51-aabb-64ce739c03ce/volumes" Dec 04 10:01:08 crc kubenswrapper[4707]: I1204 10:01:08.939435 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.003240 4707 generic.go:334] "Generic (PLEG): container finished" podID="7facb196-5d86-4e0c-9fad-0fb8fd19cce2" containerID="c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c" exitCode=143 Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.003305 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"7facb196-5d86-4e0c-9fad-0fb8fd19cce2","Type":"ContainerDied","Data":"c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c"} Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.005943 4707 generic.go:334] "Generic (PLEG): container finished" podID="f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" containerID="27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a" exitCode=0 Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.005997 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db","Type":"ContainerDied","Data":"27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a"} Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.007708 4707 generic.go:334] "Generic (PLEG): container finished" podID="a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" containerID="70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e" exitCode=0 Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.007730 4707 generic.go:334] "Generic (PLEG): container finished" podID="a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" containerID="34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4" exitCode=1 Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.007762 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c","Type":"ContainerDied","Data":"70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e"} Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.007783 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c","Type":"ContainerDied","Data":"34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4"} Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.007796 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c","Type":"ContainerDied","Data":"819e6df12d3505a8f9a20495e16708ae16621b3cd91ae37491f213606262d961"} Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.007810 4707 scope.go:117] "RemoveContainer" containerID="70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.007933 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.012925 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" event={"ID":"748f7b8f-ec6a-4a9d-a3c6-41a838028863","Type":"ContainerStarted","Data":"dce391e07dc0c067ad013719a113982a7b65aefbb658700830a0a7e6d53477d4"} Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.012987 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" event={"ID":"748f7b8f-ec6a-4a9d-a3c6-41a838028863","Type":"ContainerStarted","Data":"4e49c0aa7a3ee10141d202a2b63771f9dcac702a7dfd3521b1c435bfb093216a"} Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.018970 4707 generic.go:334] "Generic (PLEG): container finished" podID="c712367d-523c-41a8-9b9f-d9644cda0c26" containerID="43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1" exitCode=0 Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.019064 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"c712367d-523c-41a8-9b9f-d9644cda0c26","Type":"ContainerDied","Data":"43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1"} Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.021010 4707 generic.go:334] "Generic (PLEG): container finished" podID="8d7b1bef-7bf1-4789-8783-5552386590a5" containerID="d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e" exitCode=0 Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.021059 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8d7b1bef-7bf1-4789-8783-5552386590a5","Type":"ContainerDied","Data":"d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e"} Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.031441 4707 scope.go:117] "RemoveContainer" containerID="34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.033259 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" podStartSLOduration=1.033236643 podStartE2EDuration="1.033236643s" podCreationTimestamp="2025-12-04 10:01:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:01:09.029399744 +0000 UTC m=+1368.465222261" watchObservedRunningTime="2025-12-04 10:01:09.033236643 +0000 UTC m=+1368.469059150" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.051710 4707 scope.go:117] "RemoveContainer" containerID="70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e" Dec 04 10:01:09 crc kubenswrapper[4707]: E1204 10:01:09.052195 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e\": 
container with ID starting with 70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e not found: ID does not exist" containerID="70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.052232 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e"} err="failed to get container status \"70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e\": rpc error: code = NotFound desc = could not find container \"70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e\": container with ID starting with 70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e not found: ID does not exist" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.052253 4707 scope.go:117] "RemoveContainer" containerID="34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4" Dec 04 10:01:09 crc kubenswrapper[4707]: E1204 10:01:09.053193 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4\": container with ID starting with 34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4 not found: ID does not exist" containerID="34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.053242 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4"} err="failed to get container status \"34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4\": rpc error: code = NotFound desc = could not find container \"34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4\": container with ID starting with 34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4 not found: ID does not exist" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.054297 4707 scope.go:117] "RemoveContainer" containerID="70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.055752 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e"} err="failed to get container status \"70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e\": rpc error: code = NotFound desc = could not find container \"70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e\": container with ID starting with 70e2c03c1142b46a186a04a34fddddb29a0d5dea75194a369a1542eef450a94e not found: ID does not exist" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.055779 4707 scope.go:117] "RemoveContainer" containerID="34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.057056 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4"} err="failed to get container status \"34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4\": rpc error: code = NotFound desc = could not find container \"34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4\": container with ID starting with 34a5d1d204231ac4f52d60d3be7c41c5c7aba532f9d3c545b55bbf6931d92bd4 not 
found: ID does not exist" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.082961 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjtb2\" (UniqueName: \"kubernetes.io/projected/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-kube-api-access-pjtb2\") pod \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.083411 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data\") pod \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.083488 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data-custom\") pod \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.083566 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-scripts\") pod \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.083638 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-var-lib-manila\") pod \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.083673 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-ceph\") pod \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.083703 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-etc-machine-id\") pod \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\" (UID: \"a7cc3e1c-5829-4a8e-9bfd-6829eaca788c\") " Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.084110 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" (UID: "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.086241 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" (UID: "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c"). InnerVolumeSpecName "var-lib-manila". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.090871 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" (UID: "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.091380 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-kube-api-access-pjtb2" (OuterVolumeSpecName: "kube-api-access-pjtb2") pod "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" (UID: "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c"). InnerVolumeSpecName "kube-api-access-pjtb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.091772 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-ceph" (OuterVolumeSpecName: "ceph") pod "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" (UID: "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.095161 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-scripts" (OuterVolumeSpecName: "scripts") pod "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" (UID: "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.150511 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data" (OuterVolumeSpecName: "config-data") pod "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" (UID: "a7cc3e1c-5829-4a8e-9bfd-6829eaca788c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.185513 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.185557 4707 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-var-lib-manila\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.185568 4707 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-ceph\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.185577 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.185586 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjtb2\" (UniqueName: \"kubernetes.io/projected/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-kube-api-access-pjtb2\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.185598 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.185608 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.353106 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:01:09 crc kubenswrapper[4707]: I1204 10:01:09.372319 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:01:10 crc kubenswrapper[4707]: I1204 10:01:10.029080 4707 generic.go:334] "Generic (PLEG): container finished" podID="748f7b8f-ec6a-4a9d-a3c6-41a838028863" containerID="dce391e07dc0c067ad013719a113982a7b65aefbb658700830a0a7e6d53477d4" exitCode=0 Dec 04 10:01:10 crc kubenswrapper[4707]: I1204 10:01:10.029155 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" event={"ID":"748f7b8f-ec6a-4a9d-a3c6-41a838028863","Type":"ContainerDied","Data":"dce391e07dc0c067ad013719a113982a7b65aefbb658700830a0a7e6d53477d4"} Dec 04 10:01:10 crc kubenswrapper[4707]: I1204 10:01:10.855830 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" path="/var/lib/kubelet/pods/a7cc3e1c-5829-4a8e-9bfd-6829eaca788c/volumes" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.541171 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.553032 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.725256 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d7b1bef-7bf1-4789-8783-5552386590a5-etc-machine-id\") pod \"8d7b1bef-7bf1-4789-8783-5552386590a5\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.725325 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/748f7b8f-ec6a-4a9d-a3c6-41a838028863-operator-scripts\") pod \"748f7b8f-ec6a-4a9d-a3c6-41a838028863\" (UID: \"748f7b8f-ec6a-4a9d-a3c6-41a838028863\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.725373 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-897wc\" (UniqueName: \"kubernetes.io/projected/748f7b8f-ec6a-4a9d-a3c6-41a838028863-kube-api-access-897wc\") pod \"748f7b8f-ec6a-4a9d-a3c6-41a838028863\" (UID: \"748f7b8f-ec6a-4a9d-a3c6-41a838028863\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.725407 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8d7b1bef-7bf1-4789-8783-5552386590a5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8d7b1bef-7bf1-4789-8783-5552386590a5" (UID: "8d7b1bef-7bf1-4789-8783-5552386590a5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.725432 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-scripts\") pod \"8d7b1bef-7bf1-4789-8783-5552386590a5\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.725458 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2md5\" (UniqueName: \"kubernetes.io/projected/8d7b1bef-7bf1-4789-8783-5552386590a5-kube-api-access-m2md5\") pod \"8d7b1bef-7bf1-4789-8783-5552386590a5\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.725488 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data\") pod \"8d7b1bef-7bf1-4789-8783-5552386590a5\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.725546 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data-custom\") pod \"8d7b1bef-7bf1-4789-8783-5552386590a5\" (UID: \"8d7b1bef-7bf1-4789-8783-5552386590a5\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.725776 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d7b1bef-7bf1-4789-8783-5552386590a5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.725795 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/748f7b8f-ec6a-4a9d-a3c6-41a838028863-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod 
"748f7b8f-ec6a-4a9d-a3c6-41a838028863" (UID: "748f7b8f-ec6a-4a9d-a3c6-41a838028863"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.732029 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d7b1bef-7bf1-4789-8783-5552386590a5-kube-api-access-m2md5" (OuterVolumeSpecName: "kube-api-access-m2md5") pod "8d7b1bef-7bf1-4789-8783-5552386590a5" (UID: "8d7b1bef-7bf1-4789-8783-5552386590a5"). InnerVolumeSpecName "kube-api-access-m2md5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.732840 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8d7b1bef-7bf1-4789-8783-5552386590a5" (UID: "8d7b1bef-7bf1-4789-8783-5552386590a5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.732993 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.733047 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/748f7b8f-ec6a-4a9d-a3c6-41a838028863-kube-api-access-897wc" (OuterVolumeSpecName: "kube-api-access-897wc") pod "748f7b8f-ec6a-4a9d-a3c6-41a838028863" (UID: "748f7b8f-ec6a-4a9d-a3c6-41a838028863"). InnerVolumeSpecName "kube-api-access-897wc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.733272 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-scripts" (OuterVolumeSpecName: "scripts") pod "8d7b1bef-7bf1-4789-8783-5552386590a5" (UID: "8d7b1bef-7bf1-4789-8783-5552386590a5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.767230 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.806030 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data" (OuterVolumeSpecName: "config-data") pod "8d7b1bef-7bf1-4789-8783-5552386590a5" (UID: "8d7b1bef-7bf1-4789-8783-5552386590a5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.826923 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-logs\") pod \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.826979 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4r6v\" (UniqueName: \"kubernetes.io/projected/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-kube-api-access-s4r6v\") pod \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.827115 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-etc-machine-id\") pod \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.827140 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data\") pod \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.827160 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-scripts\") pod \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.827225 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data-custom\") pod \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\" (UID: \"7facb196-5d86-4e0c-9fad-0fb8fd19cce2\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.827543 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/748f7b8f-ec6a-4a9d-a3c6-41a838028863-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.827559 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-897wc\" (UniqueName: \"kubernetes.io/projected/748f7b8f-ec6a-4a9d-a3c6-41a838028863-kube-api-access-897wc\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.827571 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.827581 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2md5\" (UniqueName: \"kubernetes.io/projected/8d7b1bef-7bf1-4789-8783-5552386590a5-kube-api-access-m2md5\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.827579 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7facb196-5d86-4e0c-9fad-0fb8fd19cce2" (UID: 
"7facb196-5d86-4e0c-9fad-0fb8fd19cce2"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.827589 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.827617 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d7b1bef-7bf1-4789-8783-5552386590a5-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.828007 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-logs" (OuterVolumeSpecName: "logs") pod "7facb196-5d86-4e0c-9fad-0fb8fd19cce2" (UID: "7facb196-5d86-4e0c-9fad-0fb8fd19cce2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.830745 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-scripts" (OuterVolumeSpecName: "scripts") pod "7facb196-5d86-4e0c-9fad-0fb8fd19cce2" (UID: "7facb196-5d86-4e0c-9fad-0fb8fd19cce2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.830779 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-kube-api-access-s4r6v" (OuterVolumeSpecName: "kube-api-access-s4r6v") pod "7facb196-5d86-4e0c-9fad-0fb8fd19cce2" (UID: "7facb196-5d86-4e0c-9fad-0fb8fd19cce2"). InnerVolumeSpecName "kube-api-access-s4r6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.831623 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7facb196-5d86-4e0c-9fad-0fb8fd19cce2" (UID: "7facb196-5d86-4e0c-9fad-0fb8fd19cce2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.856936 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data" (OuterVolumeSpecName: "config-data") pod "7facb196-5d86-4e0c-9fad-0fb8fd19cce2" (UID: "7facb196-5d86-4e0c-9fad-0fb8fd19cce2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.928223 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvv8k\" (UniqueName: \"kubernetes.io/projected/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-kube-api-access-gvv8k\") pod \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.928350 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-etc-machine-id\") pod \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.928407 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data-custom\") pod \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.928482 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-scripts\") pod \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.928527 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" (UID: "f7b3ae99-3b0c-4af0-9677-aebab2a6c0db"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.928968 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data\") pod \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\" (UID: \"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db\") " Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.929555 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.929579 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.929588 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.929596 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.929606 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.929614 4707 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-logs\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.929622 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4r6v\" (UniqueName: \"kubernetes.io/projected/7facb196-5d86-4e0c-9fad-0fb8fd19cce2-kube-api-access-s4r6v\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.931591 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" (UID: "f7b3ae99-3b0c-4af0-9677-aebab2a6c0db"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.931846 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-scripts" (OuterVolumeSpecName: "scripts") pod "f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" (UID: "f7b3ae99-3b0c-4af0-9677-aebab2a6c0db"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.932765 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-kube-api-access-gvv8k" (OuterVolumeSpecName: "kube-api-access-gvv8k") pod "f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" (UID: "f7b3ae99-3b0c-4af0-9677-aebab2a6c0db"). InnerVolumeSpecName "kube-api-access-gvv8k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:11 crc kubenswrapper[4707]: I1204 10:01:11.986791 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data" (OuterVolumeSpecName: "config-data") pod "f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" (UID: "f7b3ae99-3b0c-4af0-9677-aebab2a6c0db"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.030566 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.030606 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.030615 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.030629 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvv8k\" (UniqueName: \"kubernetes.io/projected/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db-kube-api-access-gvv8k\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.047545 4707 generic.go:334] "Generic (PLEG): container finished" podID="8d7b1bef-7bf1-4789-8783-5552386590a5" containerID="5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf" exitCode=0 Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.047629 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.047629 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8d7b1bef-7bf1-4789-8783-5552386590a5","Type":"ContainerDied","Data":"5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf"} Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.047743 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8d7b1bef-7bf1-4789-8783-5552386590a5","Type":"ContainerDied","Data":"735b67224f0c6817032980ca2db85730b5f5a83f59100468b33cc95448ff0dcb"} Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.047765 4707 scope.go:117] "RemoveContainer" containerID="d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.052867 4707 generic.go:334] "Generic (PLEG): container finished" podID="7facb196-5d86-4e0c-9fad-0fb8fd19cce2" containerID="7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd" exitCode=0 Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.052913 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.053018 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"7facb196-5d86-4e0c-9fad-0fb8fd19cce2","Type":"ContainerDied","Data":"7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd"} Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.053690 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"7facb196-5d86-4e0c-9fad-0fb8fd19cce2","Type":"ContainerDied","Data":"0894c5bd81feb117d62c81b064d06460683eb5cb6e593c3f7c590800b7e08c8d"} Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.057175 4707 generic.go:334] "Generic (PLEG): container finished" podID="f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" containerID="2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b" exitCode=0 Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.057279 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db","Type":"ContainerDied","Data":"2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b"} Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.057319 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"f7b3ae99-3b0c-4af0-9677-aebab2a6c0db","Type":"ContainerDied","Data":"a75946273b9a8bfe84421bd5afe23459f435b1ca3526e8bbf059d833b3971257"} Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.057510 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-2" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.060493 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" event={"ID":"748f7b8f-ec6a-4a9d-a3c6-41a838028863","Type":"ContainerDied","Data":"4e49c0aa7a3ee10141d202a2b63771f9dcac702a7dfd3521b1c435bfb093216a"} Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.060547 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e49c0aa7a3ee10141d202a2b63771f9dcac702a7dfd3521b1c435bfb093216a" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.060547 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila9bcb-account-delete-nlccv" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.078764 4707 scope.go:117] "RemoveContainer" containerID="5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.095011 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.103078 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.109296 4707 scope.go:117] "RemoveContainer" containerID="d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e" Dec 04 10:01:12 crc kubenswrapper[4707]: E1204 10:01:12.110892 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e\": container with ID starting with d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e not found: ID does not exist" containerID="d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.111075 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e"} err="failed to get container status \"d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e\": rpc error: code = NotFound desc = could not find container \"d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e\": container with ID starting with d4fd869bcee883df44d64b0a566760487cf30a191951d126f1bf926e4c62882e not found: ID does not exist" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.111180 4707 scope.go:117] "RemoveContainer" containerID="5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf" Dec 04 10:01:12 crc kubenswrapper[4707]: E1204 10:01:12.111929 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf\": container with ID starting with 5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf not found: ID does not exist" containerID="5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.111980 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf"} err="failed to get container status \"5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf\": rpc error: code = NotFound desc = could not find container \"5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf\": container with ID starting with 5aadecca11b44e952be025178b282fb3df063eeb5bf621caa173d1097dc1f9bf not found: ID does not exist" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.112016 4707 scope.go:117] "RemoveContainer" containerID="7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.118278 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.127476 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-api-0"] 
Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.135040 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.166239 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.171675 4707 scope.go:117] "RemoveContainer" containerID="c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.186750 4707 scope.go:117] "RemoveContainer" containerID="7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd" Dec 04 10:01:12 crc kubenswrapper[4707]: E1204 10:01:12.187262 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd\": container with ID starting with 7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd not found: ID does not exist" containerID="7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.187304 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd"} err="failed to get container status \"7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd\": rpc error: code = NotFound desc = could not find container \"7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd\": container with ID starting with 7c1c69f436be516d26324399596887dabf212ddc28256aed3c63e429422453dd not found: ID does not exist" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.187347 4707 scope.go:117] "RemoveContainer" containerID="c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c" Dec 04 10:01:12 crc kubenswrapper[4707]: E1204 10:01:12.187806 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c\": container with ID starting with c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c not found: ID does not exist" containerID="c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.187858 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c"} err="failed to get container status \"c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c\": rpc error: code = NotFound desc = could not find container \"c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c\": container with ID starting with c2fcf1b6e43bea6c10ebbd2f9828bad67e83be5b8fcb8f2e18629719f1326c3c not found: ID does not exist" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.187885 4707 scope.go:117] "RemoveContainer" containerID="27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.204842 4707 scope.go:117] "RemoveContainer" containerID="2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.222608 4707 scope.go:117] "RemoveContainer" containerID="27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a" Dec 04 10:01:12 crc 
kubenswrapper[4707]: E1204 10:01:12.223103 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a\": container with ID starting with 27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a not found: ID does not exist" containerID="27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.223136 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a"} err="failed to get container status \"27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a\": rpc error: code = NotFound desc = could not find container \"27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a\": container with ID starting with 27f0a9cc5a29f6b54920e0e5fbc8f733b0653c3fee7eb3148a2c54dcbf5ae34a not found: ID does not exist" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.223164 4707 scope.go:117] "RemoveContainer" containerID="2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b" Dec 04 10:01:12 crc kubenswrapper[4707]: E1204 10:01:12.223526 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b\": container with ID starting with 2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b not found: ID does not exist" containerID="2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.223580 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b"} err="failed to get container status \"2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b\": rpc error: code = NotFound desc = could not find container \"2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b\": container with ID starting with 2a44d926c263d9d3d22b81186e418cf00cf8c926510f7060572d49a20ffd861b not found: ID does not exist" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.854616 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7facb196-5d86-4e0c-9fad-0fb8fd19cce2" path="/var/lib/kubelet/pods/7facb196-5d86-4e0c-9fad-0fb8fd19cce2/volumes" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.855301 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d7b1bef-7bf1-4789-8783-5552386590a5" path="/var/lib/kubelet/pods/8d7b1bef-7bf1-4789-8783-5552386590a5/volumes" Dec 04 10:01:12 crc kubenswrapper[4707]: I1204 10:01:12.856385 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" path="/var/lib/kubelet/pods/f7b3ae99-3b0c-4af0-9677-aebab2a6c0db/volumes" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.095507 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-create-hpsb5"] Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.104154 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-create-hpsb5"] Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.128887 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs"] Dec 04 
10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.136180 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila9bcb-account-delete-nlccv"] Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.145779 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-9bcb-account-create-update-kqvrs"] Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.150686 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila9bcb-account-delete-nlccv"] Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.187793 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-create-k8h5v"] Dec 04 10:01:13 crc kubenswrapper[4707]: E1204 10:01:13.188101 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d7b1bef-7bf1-4789-8783-5552386590a5" containerName="probe" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188125 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d7b1bef-7bf1-4789-8783-5552386590a5" containerName="probe" Dec 04 10:01:13 crc kubenswrapper[4707]: E1204 10:01:13.188146 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" containerName="manila-scheduler" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188154 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" containerName="manila-scheduler" Dec 04 10:01:13 crc kubenswrapper[4707]: E1204 10:01:13.188165 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" containerName="manila-share" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188172 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" containerName="manila-share" Dec 04 10:01:13 crc kubenswrapper[4707]: E1204 10:01:13.188182 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7facb196-5d86-4e0c-9fad-0fb8fd19cce2" containerName="manila-api-log" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188189 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="7facb196-5d86-4e0c-9fad-0fb8fd19cce2" containerName="manila-api-log" Dec 04 10:01:13 crc kubenswrapper[4707]: E1204 10:01:13.188203 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" containerName="probe" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188210 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" containerName="probe" Dec 04 10:01:13 crc kubenswrapper[4707]: E1204 10:01:13.188228 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d7b1bef-7bf1-4789-8783-5552386590a5" containerName="manila-scheduler" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188236 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d7b1bef-7bf1-4789-8783-5552386590a5" containerName="manila-scheduler" Dec 04 10:01:13 crc kubenswrapper[4707]: E1204 10:01:13.188248 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7facb196-5d86-4e0c-9fad-0fb8fd19cce2" containerName="manila-api" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188257 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="7facb196-5d86-4e0c-9fad-0fb8fd19cce2" containerName="manila-api" Dec 04 10:01:13 crc kubenswrapper[4707]: E1204 10:01:13.188266 4707 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" containerName="probe" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188273 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" containerName="probe" Dec 04 10:01:13 crc kubenswrapper[4707]: E1204 10:01:13.188285 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="748f7b8f-ec6a-4a9d-a3c6-41a838028863" containerName="mariadb-account-delete" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188294 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="748f7b8f-ec6a-4a9d-a3c6-41a838028863" containerName="mariadb-account-delete" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188437 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d7b1bef-7bf1-4789-8783-5552386590a5" containerName="probe" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188451 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" containerName="probe" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188457 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="7facb196-5d86-4e0c-9fad-0fb8fd19cce2" containerName="manila-api-log" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188464 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" containerName="manila-scheduler" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188473 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="7facb196-5d86-4e0c-9fad-0fb8fd19cce2" containerName="manila-api" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188480 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7b3ae99-3b0c-4af0-9677-aebab2a6c0db" containerName="probe" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188488 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="748f7b8f-ec6a-4a9d-a3c6-41a838028863" containerName="mariadb-account-delete" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188499 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d7b1bef-7bf1-4789-8783-5552386590a5" containerName="manila-scheduler" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188506 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7cc3e1c-5829-4a8e-9bfd-6829eaca788c" containerName="manila-share" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.188966 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-k8h5v" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.198238 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-k8h5v"] Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.299569 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz"] Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.300526 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.302772 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-db-secret" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.305711 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz"] Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.347935 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npbjj\" (UniqueName: \"kubernetes.io/projected/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-kube-api-access-npbjj\") pod \"manila-db-create-k8h5v\" (UID: \"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5\") " pod="manila-kuttl-tests/manila-db-create-k8h5v" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.348181 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-operator-scripts\") pod \"manila-db-create-k8h5v\" (UID: \"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5\") " pod="manila-kuttl-tests/manila-db-create-k8h5v" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.449996 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npbjj\" (UniqueName: \"kubernetes.io/projected/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-kube-api-access-npbjj\") pod \"manila-db-create-k8h5v\" (UID: \"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5\") " pod="manila-kuttl-tests/manila-db-create-k8h5v" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.450073 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-operator-scripts\") pod \"manila-1cdd-account-create-update-qv9sz\" (UID: \"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd\") " pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.450107 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-operator-scripts\") pod \"manila-db-create-k8h5v\" (UID: \"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5\") " pod="manila-kuttl-tests/manila-db-create-k8h5v" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.450140 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrrpc\" (UniqueName: \"kubernetes.io/projected/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-kube-api-access-xrrpc\") pod \"manila-1cdd-account-create-update-qv9sz\" (UID: \"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd\") " pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.451101 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-operator-scripts\") pod \"manila-db-create-k8h5v\" (UID: \"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5\") " pod="manila-kuttl-tests/manila-db-create-k8h5v" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.474605 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npbjj\" (UniqueName: 
\"kubernetes.io/projected/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-kube-api-access-npbjj\") pod \"manila-db-create-k8h5v\" (UID: \"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5\") " pod="manila-kuttl-tests/manila-db-create-k8h5v" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.503767 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-k8h5v" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.552036 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-operator-scripts\") pod \"manila-1cdd-account-create-update-qv9sz\" (UID: \"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd\") " pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.552290 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrrpc\" (UniqueName: \"kubernetes.io/projected/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-kube-api-access-xrrpc\") pod \"manila-1cdd-account-create-update-qv9sz\" (UID: \"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd\") " pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.552923 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-operator-scripts\") pod \"manila-1cdd-account-create-update-qv9sz\" (UID: \"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd\") " pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.579052 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrrpc\" (UniqueName: \"kubernetes.io/projected/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-kube-api-access-xrrpc\") pod \"manila-1cdd-account-create-update-qv9sz\" (UID: \"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd\") " pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.615685 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.725841 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.765868 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-k8h5v"] Dec 04 10:01:13 crc kubenswrapper[4707]: W1204 10:01:13.770976 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d84b0f2_1df4_42e0_a47d_87f65c9d27c5.slice/crio-1d385d441460fd031b6716373ec54305bf30231478e2f2aa9e5f4c3d1f726ca6 WatchSource:0}: Error finding container 1d385d441460fd031b6716373ec54305bf30231478e2f2aa9e5f4c3d1f726ca6: Status 404 returned error can't find the container with id 1d385d441460fd031b6716373ec54305bf30231478e2f2aa9e5f4c3d1f726ca6 Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.857477 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data-custom\") pod \"c712367d-523c-41a8-9b9f-d9644cda0c26\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.857820 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data\") pod \"c712367d-523c-41a8-9b9f-d9644cda0c26\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.857965 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c712367d-523c-41a8-9b9f-d9644cda0c26-etc-machine-id\") pod \"c712367d-523c-41a8-9b9f-d9644cda0c26\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.857996 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-scripts\") pod \"c712367d-523c-41a8-9b9f-d9644cda0c26\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.858091 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nq9t4\" (UniqueName: \"kubernetes.io/projected/c712367d-523c-41a8-9b9f-d9644cda0c26-kube-api-access-nq9t4\") pod \"c712367d-523c-41a8-9b9f-d9644cda0c26\" (UID: \"c712367d-523c-41a8-9b9f-d9644cda0c26\") " Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.859034 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c712367d-523c-41a8-9b9f-d9644cda0c26-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c712367d-523c-41a8-9b9f-d9644cda0c26" (UID: "c712367d-523c-41a8-9b9f-d9644cda0c26"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.866177 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-scripts" (OuterVolumeSpecName: "scripts") pod "c712367d-523c-41a8-9b9f-d9644cda0c26" (UID: "c712367d-523c-41a8-9b9f-d9644cda0c26"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.867584 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c712367d-523c-41a8-9b9f-d9644cda0c26" (UID: "c712367d-523c-41a8-9b9f-d9644cda0c26"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.868853 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c712367d-523c-41a8-9b9f-d9644cda0c26-kube-api-access-nq9t4" (OuterVolumeSpecName: "kube-api-access-nq9t4") pod "c712367d-523c-41a8-9b9f-d9644cda0c26" (UID: "c712367d-523c-41a8-9b9f-d9644cda0c26"). InnerVolumeSpecName "kube-api-access-nq9t4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.934563 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data" (OuterVolumeSpecName: "config-data") pod "c712367d-523c-41a8-9b9f-d9644cda0c26" (UID: "c712367d-523c-41a8-9b9f-d9644cda0c26"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.959626 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.959673 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.959685 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c712367d-523c-41a8-9b9f-d9644cda0c26-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.959697 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c712367d-523c-41a8-9b9f-d9644cda0c26-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:13 crc kubenswrapper[4707]: I1204 10:01:13.959709 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nq9t4\" (UniqueName: \"kubernetes.io/projected/c712367d-523c-41a8-9b9f-d9644cda0c26-kube-api-access-nq9t4\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.060695 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz"] Dec 04 10:01:14 crc kubenswrapper[4707]: W1204 10:01:14.063321 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod775ebfd2_ec29_4a2a_94cc_d6ed096df7bd.slice/crio-812df9e13e2ab2b198dc08ecdbb7279fc705788b6ad47b0504f044fb7ab418e8 WatchSource:0}: Error finding container 812df9e13e2ab2b198dc08ecdbb7279fc705788b6ad47b0504f044fb7ab418e8: Status 404 returned error can't find the container with id 812df9e13e2ab2b198dc08ecdbb7279fc705788b6ad47b0504f044fb7ab418e8 Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.081869 4707 generic.go:334] "Generic (PLEG): 
container finished" podID="c712367d-523c-41a8-9b9f-d9644cda0c26" containerID="2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761" exitCode=0 Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.081961 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"c712367d-523c-41a8-9b9f-d9644cda0c26","Type":"ContainerDied","Data":"2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761"} Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.082023 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"c712367d-523c-41a8-9b9f-d9644cda0c26","Type":"ContainerDied","Data":"60afd4d61bd83b32fb00210ab034f16ba2f927daa5488b91a3609a9143f16d23"} Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.082045 4707 scope.go:117] "RemoveContainer" containerID="43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1" Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.082203 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-1" Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.084445 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" event={"ID":"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd","Type":"ContainerStarted","Data":"812df9e13e2ab2b198dc08ecdbb7279fc705788b6ad47b0504f044fb7ab418e8"} Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.086795 4707 generic.go:334] "Generic (PLEG): container finished" podID="5d84b0f2-1df4-42e0-a47d-87f65c9d27c5" containerID="27e81c0890a27226fd777a1a811bd0713e1ec2d4121f428f752db37af6237c39" exitCode=0 Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.086836 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-k8h5v" event={"ID":"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5","Type":"ContainerDied","Data":"27e81c0890a27226fd777a1a811bd0713e1ec2d4121f428f752db37af6237c39"} Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.086886 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-k8h5v" event={"ID":"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5","Type":"ContainerStarted","Data":"1d385d441460fd031b6716373ec54305bf30231478e2f2aa9e5f4c3d1f726ca6"} Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.138310 4707 scope.go:117] "RemoveContainer" containerID="2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761" Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.181166 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.188195 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.189257 4707 scope.go:117] "RemoveContainer" containerID="43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1" Dec 04 10:01:14 crc kubenswrapper[4707]: E1204 10:01:14.190031 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1\": container with ID starting with 43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1 not found: ID does not exist" containerID="43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1" Dec 04 10:01:14 crc 
kubenswrapper[4707]: I1204 10:01:14.190079 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1"} err="failed to get container status \"43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1\": rpc error: code = NotFound desc = could not find container \"43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1\": container with ID starting with 43ce014d59bd213df9a2315a82826832aaf5cd00d537923234d1740d9c3e9da1 not found: ID does not exist" Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.190117 4707 scope.go:117] "RemoveContainer" containerID="2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761" Dec 04 10:01:14 crc kubenswrapper[4707]: E1204 10:01:14.191430 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761\": container with ID starting with 2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761 not found: ID does not exist" containerID="2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761" Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.191524 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761"} err="failed to get container status \"2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761\": rpc error: code = NotFound desc = could not find container \"2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761\": container with ID starting with 2912e7adbbf77c41861d61d3ab42cfd1ce8ae655c3b45ae51df29dfc9d49f761 not found: ID does not exist" Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.853065 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="748f7b8f-ec6a-4a9d-a3c6-41a838028863" path="/var/lib/kubelet/pods/748f7b8f-ec6a-4a9d-a3c6-41a838028863/volumes" Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.853800 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fc71a77-6268-43a1-b4a2-322b2016e6bc" path="/var/lib/kubelet/pods/9fc71a77-6268-43a1-b4a2-322b2016e6bc/volumes" Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.854259 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad9bc849-2e93-44b6-81de-5b72a9c1e0eb" path="/var/lib/kubelet/pods/ad9bc849-2e93-44b6-81de-5b72a9c1e0eb/volumes" Dec 04 10:01:14 crc kubenswrapper[4707]: I1204 10:01:14.854747 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c712367d-523c-41a8-9b9f-d9644cda0c26" path="/var/lib/kubelet/pods/c712367d-523c-41a8-9b9f-d9644cda0c26/volumes" Dec 04 10:01:15 crc kubenswrapper[4707]: I1204 10:01:15.096548 4707 generic.go:334] "Generic (PLEG): container finished" podID="775ebfd2-ec29-4a2a-94cc-d6ed096df7bd" containerID="da9f950d03719060d2813b8b17feb8723c54f1d8fa7271b26ccf944e1e778962" exitCode=0 Dec 04 10:01:15 crc kubenswrapper[4707]: I1204 10:01:15.096667 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" event={"ID":"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd","Type":"ContainerDied","Data":"da9f950d03719060d2813b8b17feb8723c54f1d8fa7271b26ccf944e1e778962"} Dec 04 10:01:15 crc kubenswrapper[4707]: I1204 10:01:15.422088 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-create-k8h5v" Dec 04 10:01:15 crc kubenswrapper[4707]: I1204 10:01:15.595445 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-operator-scripts\") pod \"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5\" (UID: \"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5\") " Dec 04 10:01:15 crc kubenswrapper[4707]: I1204 10:01:15.595570 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npbjj\" (UniqueName: \"kubernetes.io/projected/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-kube-api-access-npbjj\") pod \"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5\" (UID: \"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5\") " Dec 04 10:01:15 crc kubenswrapper[4707]: I1204 10:01:15.596393 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5d84b0f2-1df4-42e0-a47d-87f65c9d27c5" (UID: "5d84b0f2-1df4-42e0-a47d-87f65c9d27c5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:01:15 crc kubenswrapper[4707]: I1204 10:01:15.601539 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-kube-api-access-npbjj" (OuterVolumeSpecName: "kube-api-access-npbjj") pod "5d84b0f2-1df4-42e0-a47d-87f65c9d27c5" (UID: "5d84b0f2-1df4-42e0-a47d-87f65c9d27c5"). InnerVolumeSpecName "kube-api-access-npbjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:15 crc kubenswrapper[4707]: I1204 10:01:15.697493 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:15 crc kubenswrapper[4707]: I1204 10:01:15.697530 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npbjj\" (UniqueName: \"kubernetes.io/projected/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5-kube-api-access-npbjj\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:16 crc kubenswrapper[4707]: I1204 10:01:16.106677 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-k8h5v" event={"ID":"5d84b0f2-1df4-42e0-a47d-87f65c9d27c5","Type":"ContainerDied","Data":"1d385d441460fd031b6716373ec54305bf30231478e2f2aa9e5f4c3d1f726ca6"} Dec 04 10:01:16 crc kubenswrapper[4707]: I1204 10:01:16.106748 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d385d441460fd031b6716373ec54305bf30231478e2f2aa9e5f4c3d1f726ca6" Dec 04 10:01:16 crc kubenswrapper[4707]: I1204 10:01:16.106696 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-k8h5v" Dec 04 10:01:16 crc kubenswrapper[4707]: I1204 10:01:16.360119 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" Dec 04 10:01:16 crc kubenswrapper[4707]: I1204 10:01:16.509471 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-operator-scripts\") pod \"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd\" (UID: \"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd\") " Dec 04 10:01:16 crc kubenswrapper[4707]: I1204 10:01:16.509730 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrrpc\" (UniqueName: \"kubernetes.io/projected/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-kube-api-access-xrrpc\") pod \"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd\" (UID: \"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd\") " Dec 04 10:01:16 crc kubenswrapper[4707]: I1204 10:01:16.510143 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "775ebfd2-ec29-4a2a-94cc-d6ed096df7bd" (UID: "775ebfd2-ec29-4a2a-94cc-d6ed096df7bd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:01:16 crc kubenswrapper[4707]: I1204 10:01:16.512305 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-kube-api-access-xrrpc" (OuterVolumeSpecName: "kube-api-access-xrrpc") pod "775ebfd2-ec29-4a2a-94cc-d6ed096df7bd" (UID: "775ebfd2-ec29-4a2a-94cc-d6ed096df7bd"). InnerVolumeSpecName "kube-api-access-xrrpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:16 crc kubenswrapper[4707]: I1204 10:01:16.611752 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:16 crc kubenswrapper[4707]: I1204 10:01:16.611787 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrrpc\" (UniqueName: \"kubernetes.io/projected/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd-kube-api-access-xrrpc\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:17 crc kubenswrapper[4707]: I1204 10:01:17.114282 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" event={"ID":"775ebfd2-ec29-4a2a-94cc-d6ed096df7bd","Type":"ContainerDied","Data":"812df9e13e2ab2b198dc08ecdbb7279fc705788b6ad47b0504f044fb7ab418e8"} Dec 04 10:01:17 crc kubenswrapper[4707]: I1204 10:01:17.114649 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="812df9e13e2ab2b198dc08ecdbb7279fc705788b6ad47b0504f044fb7ab418e8" Dec 04 10:01:17 crc kubenswrapper[4707]: I1204 10:01:17.114360 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.522637 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-sync-x499w"] Dec 04 10:01:18 crc kubenswrapper[4707]: E1204 10:01:18.523253 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c712367d-523c-41a8-9b9f-d9644cda0c26" containerName="probe" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.523276 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="c712367d-523c-41a8-9b9f-d9644cda0c26" containerName="probe" Dec 04 10:01:18 crc kubenswrapper[4707]: E1204 10:01:18.523326 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c712367d-523c-41a8-9b9f-d9644cda0c26" containerName="manila-scheduler" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.523359 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="c712367d-523c-41a8-9b9f-d9644cda0c26" containerName="manila-scheduler" Dec 04 10:01:18 crc kubenswrapper[4707]: E1204 10:01:18.523369 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d84b0f2-1df4-42e0-a47d-87f65c9d27c5" containerName="mariadb-database-create" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.523379 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d84b0f2-1df4-42e0-a47d-87f65c9d27c5" containerName="mariadb-database-create" Dec 04 10:01:18 crc kubenswrapper[4707]: E1204 10:01:18.523400 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="775ebfd2-ec29-4a2a-94cc-d6ed096df7bd" containerName="mariadb-account-create-update" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.523410 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="775ebfd2-ec29-4a2a-94cc-d6ed096df7bd" containerName="mariadb-account-create-update" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.523632 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="775ebfd2-ec29-4a2a-94cc-d6ed096df7bd" containerName="mariadb-account-create-update" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.523654 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="c712367d-523c-41a8-9b9f-d9644cda0c26" containerName="probe" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.523671 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d84b0f2-1df4-42e0-a47d-87f65c9d27c5" containerName="mariadb-database-create" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.523683 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="c712367d-523c-41a8-9b9f-d9644cda0c26" containerName="manila-scheduler" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.524654 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.527284 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"combined-ca-bundle" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.527476 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-bthz4" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.528743 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.536421 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-x499w"] Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.641494 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-combined-ca-bundle\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.641557 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x45tc\" (UniqueName: \"kubernetes.io/projected/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-kube-api-access-x45tc\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.641613 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-config-data\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.641642 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-job-config-data\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.743211 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-config-data\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.744087 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-job-config-data\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.744200 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-combined-ca-bundle\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc 
kubenswrapper[4707]: I1204 10:01:18.744305 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x45tc\" (UniqueName: \"kubernetes.io/projected/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-kube-api-access-x45tc\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.748064 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-combined-ca-bundle\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.748164 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-job-config-data\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.748736 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-config-data\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.765667 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x45tc\" (UniqueName: \"kubernetes.io/projected/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-kube-api-access-x45tc\") pod \"manila-db-sync-x499w\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:18 crc kubenswrapper[4707]: I1204 10:01:18.856118 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:19 crc kubenswrapper[4707]: I1204 10:01:19.263292 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-x499w"] Dec 04 10:01:20 crc kubenswrapper[4707]: I1204 10:01:20.132909 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-x499w" event={"ID":"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824","Type":"ContainerStarted","Data":"67f876de9692bd20cf6cffd99fde2450a9fbb10f390ea55ee528a2b93c1aeb10"} Dec 04 10:01:20 crc kubenswrapper[4707]: I1204 10:01:20.133295 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-x499w" event={"ID":"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824","Type":"ContainerStarted","Data":"2d21a0c2d43402f15df2e273694810fa60cdf90fcca31f2cbc17337d55a58c75"} Dec 04 10:01:20 crc kubenswrapper[4707]: I1204 10:01:20.149894 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-db-sync-x499w" podStartSLOduration=2.14987407 podStartE2EDuration="2.14987407s" podCreationTimestamp="2025-12-04 10:01:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:01:20.146366181 +0000 UTC m=+1379.582188698" watchObservedRunningTime="2025-12-04 10:01:20.14987407 +0000 UTC m=+1379.585696577" Dec 04 10:01:22 crc kubenswrapper[4707]: I1204 10:01:22.147669 4707 generic.go:334] "Generic (PLEG): container finished" podID="214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824" containerID="67f876de9692bd20cf6cffd99fde2450a9fbb10f390ea55ee528a2b93c1aeb10" exitCode=0 Dec 04 10:01:22 crc kubenswrapper[4707]: I1204 10:01:22.147711 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-x499w" event={"ID":"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824","Type":"ContainerDied","Data":"67f876de9692bd20cf6cffd99fde2450a9fbb10f390ea55ee528a2b93c1aeb10"} Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.421512 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.613423 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-job-config-data\") pod \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.613536 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-combined-ca-bundle\") pod \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.614350 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-config-data\") pod \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.614512 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x45tc\" (UniqueName: \"kubernetes.io/projected/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-kube-api-access-x45tc\") pod \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\" (UID: \"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824\") " Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.618735 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824" (UID: "214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.618763 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-kube-api-access-x45tc" (OuterVolumeSpecName: "kube-api-access-x45tc") pod "214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824" (UID: "214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824"). InnerVolumeSpecName "kube-api-access-x45tc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.621522 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-config-data" (OuterVolumeSpecName: "config-data") pod "214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824" (UID: "214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.633508 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824" (UID: "214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.716687 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x45tc\" (UniqueName: \"kubernetes.io/projected/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-kube-api-access-x45tc\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.716723 4707 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-job-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.716732 4707 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:23 crc kubenswrapper[4707]: I1204 10:01:23.716742 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.165321 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-x499w" event={"ID":"214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824","Type":"ContainerDied","Data":"2d21a0c2d43402f15df2e273694810fa60cdf90fcca31f2cbc17337d55a58c75"} Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.165389 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d21a0c2d43402f15df2e273694810fa60cdf90fcca31f2cbc17337d55a58c75" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.165392 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-x499w" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.415304 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:01:24 crc kubenswrapper[4707]: E1204 10:01:24.415635 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824" containerName="manila-db-sync" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.415648 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824" containerName="manila-db-sync" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.415779 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824" containerName="manila-db-sync" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.416447 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.421102 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.421484 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scripts" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.422953 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"combined-ca-bundle" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.423138 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scheduler-config-data" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.423196 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-bthz4" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.427156 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.428404 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.433009 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-share-share0-config-data" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.438830 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.442942 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"ceph-conf-files" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.456626 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.526682 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.526731 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-scripts\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.526767 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.526877 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxtcb\" (UniqueName: \"kubernetes.io/projected/8972074a-5f24-4c51-a83f-787c390d835f-kube-api-access-xxtcb\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " 
pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.527023 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.527070 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l2m2\" (UniqueName: \"kubernetes.io/projected/955e4666-6ccc-4432-932a-5fc0801cb0f1-kube-api-access-7l2m2\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.527138 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.527170 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.527241 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-combined-ca-bundle\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.527288 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.527370 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-ceph\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.527411 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-scripts\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.527499 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8972074a-5f24-4c51-a83f-787c390d835f-etc-machine-id\") pod \"manila-scheduler-0\" (UID: 
\"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.527552 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.619881 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.620872 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.623757 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-api-config-data" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.627128 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"cert-manila-internal-svc" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.628465 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.628520 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l2m2\" (UniqueName: \"kubernetes.io/projected/955e4666-6ccc-4432-932a-5fc0801cb0f1-kube-api-access-7l2m2\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.628559 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.628584 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.628584 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.628617 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-combined-ca-bundle\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 
10:01:24.628644 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.628675 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-ceph\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.629532 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-scripts\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.629861 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8972074a-5f24-4c51-a83f-787c390d835f-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.629885 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.629947 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.629969 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-scripts\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.629994 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.630016 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxtcb\" (UniqueName: \"kubernetes.io/projected/8972074a-5f24-4c51-a83f-787c390d835f-kube-api-access-xxtcb\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.628725 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: 
\"kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.633801 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8972074a-5f24-4c51-a83f-787c390d835f-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.636800 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"cert-manila-public-svc" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.646410 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-ceph\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.646504 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-scripts\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.646661 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.646653 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.647230 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.647629 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-combined-ca-bundle\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.648036 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.654234 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.669997 4707 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.670785 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-scripts\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.676772 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l2m2\" (UniqueName: \"kubernetes.io/projected/955e4666-6ccc-4432-932a-5fc0801cb0f1-kube-api-access-7l2m2\") pod \"manila-share-share0-0\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.678168 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxtcb\" (UniqueName: \"kubernetes.io/projected/8972074a-5f24-4c51-a83f-787c390d835f-kube-api-access-xxtcb\") pod \"manila-scheduler-0\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.731767 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-internal-tls-certs\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.731830 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-scripts\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.731864 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-logs\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.731915 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvvb8\" (UniqueName: \"kubernetes.io/projected/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-kube-api-access-lvvb8\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.731954 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.731999 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-public-tls-certs\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.732040 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.732075 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-etc-machine-id\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.732097 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data-custom\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.733438 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.748870 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.833460 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvvb8\" (UniqueName: \"kubernetes.io/projected/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-kube-api-access-lvvb8\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.833532 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.833581 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-public-tls-certs\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.833612 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.833658 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-etc-machine-id\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 
04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.833687 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data-custom\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.833724 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-internal-tls-certs\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.833758 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-scripts\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.833793 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-logs\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.834234 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-logs\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.835890 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-etc-machine-id\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.839070 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-public-tls-certs\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.841125 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.841805 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data-custom\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.846051 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-internal-tls-certs\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " 
pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.854107 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-scripts\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.855484 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:24 crc kubenswrapper[4707]: I1204 10:01:24.880613 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvvb8\" (UniqueName: \"kubernetes.io/projected/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-kube-api-access-lvvb8\") pod \"manila-api-0\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:25 crc kubenswrapper[4707]: I1204 10:01:25.015041 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:25 crc kubenswrapper[4707]: I1204 10:01:25.065378 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:01:25 crc kubenswrapper[4707]: I1204 10:01:25.112036 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:01:25 crc kubenswrapper[4707]: W1204 10:01:25.123121 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod955e4666_6ccc_4432_932a_5fc0801cb0f1.slice/crio-36e083308874bb8d09d71bbbbc6cd77f0a08d4c890c2a049db7f790915706a15 WatchSource:0}: Error finding container 36e083308874bb8d09d71bbbbc6cd77f0a08d4c890c2a049db7f790915706a15: Status 404 returned error can't find the container with id 36e083308874bb8d09d71bbbbc6cd77f0a08d4c890c2a049db7f790915706a15 Dec 04 10:01:25 crc kubenswrapper[4707]: I1204 10:01:25.181974 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8972074a-5f24-4c51-a83f-787c390d835f","Type":"ContainerStarted","Data":"8a2f8b11542b16568394b131f64a6ec171eb35ae18f39901da3667f78e1ce687"} Dec 04 10:01:25 crc kubenswrapper[4707]: I1204 10:01:25.183874 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"955e4666-6ccc-4432-932a-5fc0801cb0f1","Type":"ContainerStarted","Data":"36e083308874bb8d09d71bbbbc6cd77f0a08d4c890c2a049db7f790915706a15"} Dec 04 10:01:25 crc kubenswrapper[4707]: I1204 10:01:25.304780 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:01:25 crc kubenswrapper[4707]: W1204 10:01:25.309380 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae811026_c4eb_4277_b8bc_1ddb48cb97cb.slice/crio-d26b322404e61edfbd8f4ec594f0d0f35589c77f9f8f85be8b33260c06d99fc0 WatchSource:0}: Error finding container d26b322404e61edfbd8f4ec594f0d0f35589c77f9f8f85be8b33260c06d99fc0: Status 404 returned error can't find the container with id d26b322404e61edfbd8f4ec594f0d0f35589c77f9f8f85be8b33260c06d99fc0 Dec 04 10:01:26 crc kubenswrapper[4707]: I1204 10:01:26.198502 
4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8972074a-5f24-4c51-a83f-787c390d835f","Type":"ContainerStarted","Data":"6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795"} Dec 04 10:01:26 crc kubenswrapper[4707]: I1204 10:01:26.198961 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8972074a-5f24-4c51-a83f-787c390d835f","Type":"ContainerStarted","Data":"8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972"} Dec 04 10:01:26 crc kubenswrapper[4707]: I1204 10:01:26.206828 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"955e4666-6ccc-4432-932a-5fc0801cb0f1","Type":"ContainerStarted","Data":"93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3"} Dec 04 10:01:26 crc kubenswrapper[4707]: I1204 10:01:26.209320 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"ae811026-c4eb-4277-b8bc-1ddb48cb97cb","Type":"ContainerStarted","Data":"2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b"} Dec 04 10:01:26 crc kubenswrapper[4707]: I1204 10:01:26.209387 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"ae811026-c4eb-4277-b8bc-1ddb48cb97cb","Type":"ContainerStarted","Data":"d26b322404e61edfbd8f4ec594f0d0f35589c77f9f8f85be8b33260c06d99fc0"} Dec 04 10:01:26 crc kubenswrapper[4707]: I1204 10:01:26.232194 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-scheduler-0" podStartSLOduration=2.232168301 podStartE2EDuration="2.232168301s" podCreationTimestamp="2025-12-04 10:01:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:01:26.22988276 +0000 UTC m=+1385.665705267" watchObservedRunningTime="2025-12-04 10:01:26.232168301 +0000 UTC m=+1385.667990808" Dec 04 10:01:27 crc kubenswrapper[4707]: I1204 10:01:27.222814 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"955e4666-6ccc-4432-932a-5fc0801cb0f1","Type":"ContainerStarted","Data":"71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b"} Dec 04 10:01:27 crc kubenswrapper[4707]: I1204 10:01:27.225217 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"ae811026-c4eb-4277-b8bc-1ddb48cb97cb","Type":"ContainerStarted","Data":"2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34"} Dec 04 10:01:27 crc kubenswrapper[4707]: I1204 10:01:27.257567 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-share-share0-0" podStartSLOduration=3.257542444 podStartE2EDuration="3.257542444s" podCreationTimestamp="2025-12-04 10:01:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:01:27.252920011 +0000 UTC m=+1386.688742518" watchObservedRunningTime="2025-12-04 10:01:27.257542444 +0000 UTC m=+1386.693364951" Dec 04 10:01:27 crc kubenswrapper[4707]: I1204 10:01:27.274793 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-api-0" podStartSLOduration=3.274771701 podStartE2EDuration="3.274771701s" podCreationTimestamp="2025-12-04 10:01:24 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:01:27.274264205 +0000 UTC m=+1386.710086712" watchObservedRunningTime="2025-12-04 10:01:27.274771701 +0000 UTC m=+1386.710594218" Dec 04 10:01:28 crc kubenswrapper[4707]: I1204 10:01:28.232598 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:34 crc kubenswrapper[4707]: I1204 10:01:34.734483 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:34 crc kubenswrapper[4707]: I1204 10:01:34.749784 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:46 crc kubenswrapper[4707]: I1204 10:01:46.424830 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:46 crc kubenswrapper[4707]: I1204 10:01:46.548776 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:46 crc kubenswrapper[4707]: I1204 10:01:46.555104 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.841314 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-sync-x499w"] Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.850437 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-sync-x499w"] Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.865507 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.865774 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="955e4666-6ccc-4432-932a-5fc0801cb0f1" containerName="manila-share" containerID="cri-o://93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3" gracePeriod=30 Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.865846 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="955e4666-6ccc-4432-932a-5fc0801cb0f1" containerName="probe" containerID="cri-o://71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b" gracePeriod=30 Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.871686 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.871905 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="8972074a-5f24-4c51-a83f-787c390d835f" containerName="manila-scheduler" containerID="cri-o://8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972" gracePeriod=30 Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.872043 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="8972074a-5f24-4c51-a83f-787c390d835f" containerName="probe" containerID="cri-o://6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795" gracePeriod=30 Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.916933 4707 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.917207 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="ae811026-c4eb-4277-b8bc-1ddb48cb97cb" containerName="manila-api-log" containerID="cri-o://2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b" gracePeriod=30 Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.917644 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="ae811026-c4eb-4277-b8bc-1ddb48cb97cb" containerName="manila-api" containerID="cri-o://2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34" gracePeriod=30 Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.937239 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila1cdd-account-delete-fm7sw"] Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.938113 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" Dec 04 10:01:47 crc kubenswrapper[4707]: I1204 10:01:47.953375 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila1cdd-account-delete-fm7sw"] Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.116301 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-operator-scripts\") pod \"manila1cdd-account-delete-fm7sw\" (UID: \"5f67874a-cd60-42d2-8a2e-e0b5c795d14e\") " pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.116792 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw5mn\" (UniqueName: \"kubernetes.io/projected/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-kube-api-access-gw5mn\") pod \"manila1cdd-account-delete-fm7sw\" (UID: \"5f67874a-cd60-42d2-8a2e-e0b5c795d14e\") " pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.218530 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw5mn\" (UniqueName: \"kubernetes.io/projected/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-kube-api-access-gw5mn\") pod \"manila1cdd-account-delete-fm7sw\" (UID: \"5f67874a-cd60-42d2-8a2e-e0b5c795d14e\") " pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.218621 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-operator-scripts\") pod \"manila1cdd-account-delete-fm7sw\" (UID: \"5f67874a-cd60-42d2-8a2e-e0b5c795d14e\") " pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.219627 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-operator-scripts\") pod \"manila1cdd-account-delete-fm7sw\" (UID: \"5f67874a-cd60-42d2-8a2e-e0b5c795d14e\") " pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.241314 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gw5mn\" (UniqueName: \"kubernetes.io/projected/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-kube-api-access-gw5mn\") pod \"manila1cdd-account-delete-fm7sw\" (UID: \"5f67874a-cd60-42d2-8a2e-e0b5c795d14e\") " pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.260255 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.425010 4707 generic.go:334] "Generic (PLEG): container finished" podID="955e4666-6ccc-4432-932a-5fc0801cb0f1" containerID="71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b" exitCode=0 Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.425139 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"955e4666-6ccc-4432-932a-5fc0801cb0f1","Type":"ContainerDied","Data":"71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b"} Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.431848 4707 generic.go:334] "Generic (PLEG): container finished" podID="ae811026-c4eb-4277-b8bc-1ddb48cb97cb" containerID="2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b" exitCode=143 Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.431948 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"ae811026-c4eb-4277-b8bc-1ddb48cb97cb","Type":"ContainerDied","Data":"2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b"} Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.435020 4707 generic.go:334] "Generic (PLEG): container finished" podID="8972074a-5f24-4c51-a83f-787c390d835f" containerID="6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795" exitCode=0 Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.435052 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8972074a-5f24-4c51-a83f-787c390d835f","Type":"ContainerDied","Data":"6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795"} Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.711777 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila1cdd-account-delete-fm7sw"] Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.856907 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824" path="/var/lib/kubelet/pods/214ca2c5-51ca-4dd4-8fc2-ee9ce84e4824/volumes" Dec 04 10:01:48 crc kubenswrapper[4707]: I1204 10:01:48.934768 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.029501 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-ceph\") pod \"955e4666-6ccc-4432-932a-5fc0801cb0f1\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.029881 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-scripts\") pod \"955e4666-6ccc-4432-932a-5fc0801cb0f1\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.029914 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data-custom\") pod \"955e4666-6ccc-4432-932a-5fc0801cb0f1\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.029970 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-var-lib-manila\") pod \"955e4666-6ccc-4432-932a-5fc0801cb0f1\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.029996 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7l2m2\" (UniqueName: \"kubernetes.io/projected/955e4666-6ccc-4432-932a-5fc0801cb0f1-kube-api-access-7l2m2\") pod \"955e4666-6ccc-4432-932a-5fc0801cb0f1\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.030058 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-etc-machine-id\") pod \"955e4666-6ccc-4432-932a-5fc0801cb0f1\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.030106 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data\") pod \"955e4666-6ccc-4432-932a-5fc0801cb0f1\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.030174 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-combined-ca-bundle\") pod \"955e4666-6ccc-4432-932a-5fc0801cb0f1\" (UID: \"955e4666-6ccc-4432-932a-5fc0801cb0f1\") " Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.031940 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "955e4666-6ccc-4432-932a-5fc0801cb0f1" (UID: "955e4666-6ccc-4432-932a-5fc0801cb0f1"). InnerVolumeSpecName "var-lib-manila". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.034596 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "955e4666-6ccc-4432-932a-5fc0801cb0f1" (UID: "955e4666-6ccc-4432-932a-5fc0801cb0f1"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.036013 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-ceph" (OuterVolumeSpecName: "ceph") pod "955e4666-6ccc-4432-932a-5fc0801cb0f1" (UID: "955e4666-6ccc-4432-932a-5fc0801cb0f1"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.037331 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-scripts" (OuterVolumeSpecName: "scripts") pod "955e4666-6ccc-4432-932a-5fc0801cb0f1" (UID: "955e4666-6ccc-4432-932a-5fc0801cb0f1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.037604 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/955e4666-6ccc-4432-932a-5fc0801cb0f1-kube-api-access-7l2m2" (OuterVolumeSpecName: "kube-api-access-7l2m2") pod "955e4666-6ccc-4432-932a-5fc0801cb0f1" (UID: "955e4666-6ccc-4432-932a-5fc0801cb0f1"). InnerVolumeSpecName "kube-api-access-7l2m2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.045408 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "955e4666-6ccc-4432-932a-5fc0801cb0f1" (UID: "955e4666-6ccc-4432-932a-5fc0801cb0f1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.081461 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "955e4666-6ccc-4432-932a-5fc0801cb0f1" (UID: "955e4666-6ccc-4432-932a-5fc0801cb0f1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.112118 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data" (OuterVolumeSpecName: "config-data") pod "955e4666-6ccc-4432-932a-5fc0801cb0f1" (UID: "955e4666-6ccc-4432-932a-5fc0801cb0f1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.132026 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.132064 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.132074 4707 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.132084 4707 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-ceph\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.132092 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.132100 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/955e4666-6ccc-4432-932a-5fc0801cb0f1-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.132110 4707 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/955e4666-6ccc-4432-932a-5fc0801cb0f1-var-lib-manila\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.132119 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7l2m2\" (UniqueName: \"kubernetes.io/projected/955e4666-6ccc-4432-932a-5fc0801cb0f1-kube-api-access-7l2m2\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.443019 4707 generic.go:334] "Generic (PLEG): container finished" podID="5f67874a-cd60-42d2-8a2e-e0b5c795d14e" containerID="35e0af0eb78012c1f7891bf45420b54a68b82b5f86899841ddbdee92719e657b" exitCode=0 Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.443064 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" event={"ID":"5f67874a-cd60-42d2-8a2e-e0b5c795d14e","Type":"ContainerDied","Data":"35e0af0eb78012c1f7891bf45420b54a68b82b5f86899841ddbdee92719e657b"} Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.443108 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" event={"ID":"5f67874a-cd60-42d2-8a2e-e0b5c795d14e","Type":"ContainerStarted","Data":"33b3b26b9600b8565896decc691a2bd904c76aeb7a7f21b09d0594167ffcd981"} Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.444964 4707 generic.go:334] "Generic (PLEG): container finished" podID="955e4666-6ccc-4432-932a-5fc0801cb0f1" containerID="93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3" exitCode=1 Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.445017 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" 
event={"ID":"955e4666-6ccc-4432-932a-5fc0801cb0f1","Type":"ContainerDied","Data":"93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3"} Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.445041 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"955e4666-6ccc-4432-932a-5fc0801cb0f1","Type":"ContainerDied","Data":"36e083308874bb8d09d71bbbbc6cd77f0a08d4c890c2a049db7f790915706a15"} Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.445057 4707 scope.go:117] "RemoveContainer" containerID="71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.445168 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.468913 4707 scope.go:117] "RemoveContainer" containerID="93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.482014 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.488484 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.491568 4707 scope.go:117] "RemoveContainer" containerID="71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b" Dec 04 10:01:49 crc kubenswrapper[4707]: E1204 10:01:49.492115 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b\": container with ID starting with 71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b not found: ID does not exist" containerID="71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.492163 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b"} err="failed to get container status \"71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b\": rpc error: code = NotFound desc = could not find container \"71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b\": container with ID starting with 71a8839a0a658f8fb1693b423cb614031cc6a967613267eab313935b797c821b not found: ID does not exist" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.492191 4707 scope.go:117] "RemoveContainer" containerID="93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3" Dec 04 10:01:49 crc kubenswrapper[4707]: E1204 10:01:49.492712 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3\": container with ID starting with 93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3 not found: ID does not exist" containerID="93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3" Dec 04 10:01:49 crc kubenswrapper[4707]: I1204 10:01:49.492741 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3"} err="failed to get container status 
\"93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3\": rpc error: code = NotFound desc = could not find container \"93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3\": container with ID starting with 93b21ad93edc160d370b9757259cee643495874712d7864a6e47288f23ea7df3 not found: ID does not exist" Dec 04 10:01:49 crc kubenswrapper[4707]: E1204 10:01:49.517905 4707 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod955e4666_6ccc_4432_932a_5fc0801cb0f1.slice\": RecentStats: unable to find data in memory cache]" Dec 04 10:01:50 crc kubenswrapper[4707]: I1204 10:01:50.713787 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" Dec 04 10:01:50 crc kubenswrapper[4707]: I1204 10:01:50.852434 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="955e4666-6ccc-4432-932a-5fc0801cb0f1" path="/var/lib/kubelet/pods/955e4666-6ccc-4432-932a-5fc0801cb0f1/volumes" Dec 04 10:01:50 crc kubenswrapper[4707]: I1204 10:01:50.853262 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-operator-scripts\") pod \"5f67874a-cd60-42d2-8a2e-e0b5c795d14e\" (UID: \"5f67874a-cd60-42d2-8a2e-e0b5c795d14e\") " Dec 04 10:01:50 crc kubenswrapper[4707]: I1204 10:01:50.853433 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw5mn\" (UniqueName: \"kubernetes.io/projected/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-kube-api-access-gw5mn\") pod \"5f67874a-cd60-42d2-8a2e-e0b5c795d14e\" (UID: \"5f67874a-cd60-42d2-8a2e-e0b5c795d14e\") " Dec 04 10:01:50 crc kubenswrapper[4707]: I1204 10:01:50.853993 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5f67874a-cd60-42d2-8a2e-e0b5c795d14e" (UID: "5f67874a-cd60-42d2-8a2e-e0b5c795d14e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:01:50 crc kubenswrapper[4707]: I1204 10:01:50.858489 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-kube-api-access-gw5mn" (OuterVolumeSpecName: "kube-api-access-gw5mn") pod "5f67874a-cd60-42d2-8a2e-e0b5c795d14e" (UID: "5f67874a-cd60-42d2-8a2e-e0b5c795d14e"). InnerVolumeSpecName "kube-api-access-gw5mn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:50 crc kubenswrapper[4707]: I1204 10:01:50.955914 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:50 crc kubenswrapper[4707]: I1204 10:01:50.956661 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw5mn\" (UniqueName: \"kubernetes.io/projected/5f67874a-cd60-42d2-8a2e-e0b5c795d14e-kube-api-access-gw5mn\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.420592 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.447320 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.477391 4707 generic.go:334] "Generic (PLEG): container finished" podID="ae811026-c4eb-4277-b8bc-1ddb48cb97cb" containerID="2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34" exitCode=0 Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.477425 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"ae811026-c4eb-4277-b8bc-1ddb48cb97cb","Type":"ContainerDied","Data":"2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34"} Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.477458 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"ae811026-c4eb-4277-b8bc-1ddb48cb97cb","Type":"ContainerDied","Data":"d26b322404e61edfbd8f4ec594f0d0f35589c77f9f8f85be8b33260c06d99fc0"} Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.477476 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.477480 4707 scope.go:117] "RemoveContainer" containerID="2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.478748 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" event={"ID":"5f67874a-cd60-42d2-8a2e-e0b5c795d14e","Type":"ContainerDied","Data":"33b3b26b9600b8565896decc691a2bd904c76aeb7a7f21b09d0594167ffcd981"} Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.478777 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33b3b26b9600b8565896decc691a2bd904c76aeb7a7f21b09d0594167ffcd981" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.478824 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila1cdd-account-delete-fm7sw" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.484413 4707 generic.go:334] "Generic (PLEG): container finished" podID="8972074a-5f24-4c51-a83f-787c390d835f" containerID="8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972" exitCode=0 Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.484446 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8972074a-5f24-4c51-a83f-787c390d835f","Type":"ContainerDied","Data":"8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972"} Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.484453 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.484467 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"8972074a-5f24-4c51-a83f-787c390d835f","Type":"ContainerDied","Data":"8a2f8b11542b16568394b131f64a6ec171eb35ae18f39901da3667f78e1ce687"} Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.507272 4707 scope.go:117] "RemoveContainer" containerID="2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.524117 4707 scope.go:117] "RemoveContainer" containerID="2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34" Dec 04 10:01:51 crc kubenswrapper[4707]: E1204 10:01:51.524777 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34\": container with ID starting with 2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34 not found: ID does not exist" containerID="2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.524853 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34"} err="failed to get container status \"2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34\": rpc error: code = NotFound desc = could not find container \"2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34\": container with ID starting with 2346ce8f8ee8327db4867e05a2ac0a792f9b2f994a1668fccdb8d9022ec5de34 not found: ID does not exist" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.524909 4707 scope.go:117] "RemoveContainer" containerID="2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b" Dec 04 10:01:51 crc kubenswrapper[4707]: E1204 10:01:51.525435 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b\": container with ID starting with 2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b not found: ID does not exist" containerID="2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.525491 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b"} err="failed to get container status \"2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b\": rpc error: code = NotFound desc = could not find container \"2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b\": container with ID starting with 2f8b3d23dcc12fc484e2275297c55a96f5324a35a55a3e3686328ecfccbcfa3b not found: ID does not exist" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.525516 4707 scope.go:117] "RemoveContainer" containerID="6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.541668 4707 scope.go:117] "RemoveContainer" containerID="8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.557029 4707 scope.go:117] "RemoveContainer" 
containerID="6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795" Dec 04 10:01:51 crc kubenswrapper[4707]: E1204 10:01:51.557693 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795\": container with ID starting with 6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795 not found: ID does not exist" containerID="6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.557778 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795"} err="failed to get container status \"6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795\": rpc error: code = NotFound desc = could not find container \"6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795\": container with ID starting with 6cb8f6c8895cec43da3b786d8b08de07afe804de97d8ef27c2ff99e3db78c795 not found: ID does not exist" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.557813 4707 scope.go:117] "RemoveContainer" containerID="8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972" Dec 04 10:01:51 crc kubenswrapper[4707]: E1204 10:01:51.558319 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972\": container with ID starting with 8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972 not found: ID does not exist" containerID="8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.558366 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972"} err="failed to get container status \"8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972\": rpc error: code = NotFound desc = could not find container \"8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972\": container with ID starting with 8d2ac2de9e802ec5ee53d924dfd1786cf940a3bafcaca70e7abe9c17753a7972 not found: ID does not exist" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563462 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxtcb\" (UniqueName: \"kubernetes.io/projected/8972074a-5f24-4c51-a83f-787c390d835f-kube-api-access-xxtcb\") pod \"8972074a-5f24-4c51-a83f-787c390d835f\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563516 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-internal-tls-certs\") pod \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563541 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data\") pod \"8972074a-5f24-4c51-a83f-787c390d835f\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563557 4707 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8972074a-5f24-4c51-a83f-787c390d835f-etc-machine-id\") pod \"8972074a-5f24-4c51-a83f-787c390d835f\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563583 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-combined-ca-bundle\") pod \"8972074a-5f24-4c51-a83f-787c390d835f\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563597 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-scripts\") pod \"8972074a-5f24-4c51-a83f-787c390d835f\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563639 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-combined-ca-bundle\") pod \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563656 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-public-tls-certs\") pod \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563662 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8972074a-5f24-4c51-a83f-787c390d835f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8972074a-5f24-4c51-a83f-787c390d835f" (UID: "8972074a-5f24-4c51-a83f-787c390d835f"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563684 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvvb8\" (UniqueName: \"kubernetes.io/projected/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-kube-api-access-lvvb8\") pod \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563705 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data\") pod \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563734 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-logs\") pod \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563771 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-etc-machine-id\") pod \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563791 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-scripts\") pod \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563866 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data-custom\") pod \"8972074a-5f24-4c51-a83f-787c390d835f\" (UID: \"8972074a-5f24-4c51-a83f-787c390d835f\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.563907 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data-custom\") pod \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\" (UID: \"ae811026-c4eb-4277-b8bc-1ddb48cb97cb\") " Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.564159 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8972074a-5f24-4c51-a83f-787c390d835f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.564148 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ae811026-c4eb-4277-b8bc-1ddb48cb97cb" (UID: "ae811026-c4eb-4277-b8bc-1ddb48cb97cb"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.564475 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-logs" (OuterVolumeSpecName: "logs") pod "ae811026-c4eb-4277-b8bc-1ddb48cb97cb" (UID: "ae811026-c4eb-4277-b8bc-1ddb48cb97cb"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.569779 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ae811026-c4eb-4277-b8bc-1ddb48cb97cb" (UID: "ae811026-c4eb-4277-b8bc-1ddb48cb97cb"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.569818 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-scripts" (OuterVolumeSpecName: "scripts") pod "8972074a-5f24-4c51-a83f-787c390d835f" (UID: "8972074a-5f24-4c51-a83f-787c390d835f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.569845 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-scripts" (OuterVolumeSpecName: "scripts") pod "ae811026-c4eb-4277-b8bc-1ddb48cb97cb" (UID: "ae811026-c4eb-4277-b8bc-1ddb48cb97cb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.569886 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-kube-api-access-lvvb8" (OuterVolumeSpecName: "kube-api-access-lvvb8") pod "ae811026-c4eb-4277-b8bc-1ddb48cb97cb" (UID: "ae811026-c4eb-4277-b8bc-1ddb48cb97cb"). InnerVolumeSpecName "kube-api-access-lvvb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.570028 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8972074a-5f24-4c51-a83f-787c390d835f-kube-api-access-xxtcb" (OuterVolumeSpecName: "kube-api-access-xxtcb") pod "8972074a-5f24-4c51-a83f-787c390d835f" (UID: "8972074a-5f24-4c51-a83f-787c390d835f"). InnerVolumeSpecName "kube-api-access-xxtcb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.571728 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8972074a-5f24-4c51-a83f-787c390d835f" (UID: "8972074a-5f24-4c51-a83f-787c390d835f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.585462 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae811026-c4eb-4277-b8bc-1ddb48cb97cb" (UID: "ae811026-c4eb-4277-b8bc-1ddb48cb97cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.602571 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ae811026-c4eb-4277-b8bc-1ddb48cb97cb" (UID: "ae811026-c4eb-4277-b8bc-1ddb48cb97cb"). 
InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.603585 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8972074a-5f24-4c51-a83f-787c390d835f" (UID: "8972074a-5f24-4c51-a83f-787c390d835f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.603900 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data" (OuterVolumeSpecName: "config-data") pod "ae811026-c4eb-4277-b8bc-1ddb48cb97cb" (UID: "ae811026-c4eb-4277-b8bc-1ddb48cb97cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.604410 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ae811026-c4eb-4277-b8bc-1ddb48cb97cb" (UID: "ae811026-c4eb-4277-b8bc-1ddb48cb97cb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.630213 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data" (OuterVolumeSpecName: "config-data") pod "8972074a-5f24-4c51-a83f-787c390d835f" (UID: "8972074a-5f24-4c51-a83f-787c390d835f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.665992 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxtcb\" (UniqueName: \"kubernetes.io/projected/8972074a-5f24-4c51-a83f-787c390d835f-kube-api-access-xxtcb\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666049 4707 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666058 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666067 4707 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666077 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666087 4707 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666095 4707 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666103 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvvb8\" (UniqueName: \"kubernetes.io/projected/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-kube-api-access-lvvb8\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666111 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666119 4707 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-logs\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666127 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666134 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.666142 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8972074a-5f24-4c51-a83f-787c390d835f-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: 
I1204 10:01:51.666151 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae811026-c4eb-4277-b8bc-1ddb48cb97cb-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.808164 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.816947 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.822895 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:01:51 crc kubenswrapper[4707]: I1204 10:01:51.828641 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:01:52 crc kubenswrapper[4707]: I1204 10:01:52.851900 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8972074a-5f24-4c51-a83f-787c390d835f" path="/var/lib/kubelet/pods/8972074a-5f24-4c51-a83f-787c390d835f/volumes" Dec 04 10:01:52 crc kubenswrapper[4707]: I1204 10:01:52.852546 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae811026-c4eb-4277-b8bc-1ddb48cb97cb" path="/var/lib/kubelet/pods/ae811026-c4eb-4277-b8bc-1ddb48cb97cb/volumes" Dec 04 10:01:52 crc kubenswrapper[4707]: I1204 10:01:52.965366 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-create-k8h5v"] Dec 04 10:01:52 crc kubenswrapper[4707]: I1204 10:01:52.973095 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila1cdd-account-delete-fm7sw"] Dec 04 10:01:52 crc kubenswrapper[4707]: I1204 10:01:52.982427 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-create-k8h5v"] Dec 04 10:01:52 crc kubenswrapper[4707]: I1204 10:01:52.990371 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila1cdd-account-delete-fm7sw"] Dec 04 10:01:52 crc kubenswrapper[4707]: I1204 10:01:52.999236 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz"] Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.006508 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-1cdd-account-create-update-qv9sz"] Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.051825 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-create-d2zn9"] Dec 04 10:01:53 crc kubenswrapper[4707]: E1204 10:01:53.052126 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f67874a-cd60-42d2-8a2e-e0b5c795d14e" containerName="mariadb-account-delete" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052143 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f67874a-cd60-42d2-8a2e-e0b5c795d14e" containerName="mariadb-account-delete" Dec 04 10:01:53 crc kubenswrapper[4707]: E1204 10:01:53.052164 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae811026-c4eb-4277-b8bc-1ddb48cb97cb" containerName="manila-api-log" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052172 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae811026-c4eb-4277-b8bc-1ddb48cb97cb" containerName="manila-api-log" Dec 04 10:01:53 crc kubenswrapper[4707]: E1204 10:01:53.052182 4707 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="955e4666-6ccc-4432-932a-5fc0801cb0f1" containerName="probe" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052189 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="955e4666-6ccc-4432-932a-5fc0801cb0f1" containerName="probe" Dec 04 10:01:53 crc kubenswrapper[4707]: E1204 10:01:53.052198 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8972074a-5f24-4c51-a83f-787c390d835f" containerName="manila-scheduler" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052206 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="8972074a-5f24-4c51-a83f-787c390d835f" containerName="manila-scheduler" Dec 04 10:01:53 crc kubenswrapper[4707]: E1204 10:01:53.052219 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae811026-c4eb-4277-b8bc-1ddb48cb97cb" containerName="manila-api" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052228 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae811026-c4eb-4277-b8bc-1ddb48cb97cb" containerName="manila-api" Dec 04 10:01:53 crc kubenswrapper[4707]: E1204 10:01:53.052242 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="955e4666-6ccc-4432-932a-5fc0801cb0f1" containerName="manila-share" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052250 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="955e4666-6ccc-4432-932a-5fc0801cb0f1" containerName="manila-share" Dec 04 10:01:53 crc kubenswrapper[4707]: E1204 10:01:53.052258 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8972074a-5f24-4c51-a83f-787c390d835f" containerName="probe" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052266 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="8972074a-5f24-4c51-a83f-787c390d835f" containerName="probe" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052417 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="955e4666-6ccc-4432-932a-5fc0801cb0f1" containerName="manila-share" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052429 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="8972074a-5f24-4c51-a83f-787c390d835f" containerName="probe" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052442 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae811026-c4eb-4277-b8bc-1ddb48cb97cb" containerName="manila-api" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052451 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="955e4666-6ccc-4432-932a-5fc0801cb0f1" containerName="probe" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052467 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f67874a-cd60-42d2-8a2e-e0b5c795d14e" containerName="mariadb-account-delete" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052476 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae811026-c4eb-4277-b8bc-1ddb48cb97cb" containerName="manila-api-log" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052488 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="8972074a-5f24-4c51-a83f-787c390d835f" containerName="manila-scheduler" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.052993 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-create-d2zn9" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.063458 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-d2zn9"] Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.152721 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-5c72-account-create-update-z9r5d"] Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.153509 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.155591 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-db-secret" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.165351 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-5c72-account-create-update-z9r5d"] Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.190583 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29bb829a-15e0-461b-96ce-8f42ec71a111-operator-scripts\") pod \"manila-db-create-d2zn9\" (UID: \"29bb829a-15e0-461b-96ce-8f42ec71a111\") " pod="manila-kuttl-tests/manila-db-create-d2zn9" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.190685 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-785n2\" (UniqueName: \"kubernetes.io/projected/29bb829a-15e0-461b-96ce-8f42ec71a111-kube-api-access-785n2\") pod \"manila-db-create-d2zn9\" (UID: \"29bb829a-15e0-461b-96ce-8f42ec71a111\") " pod="manila-kuttl-tests/manila-db-create-d2zn9" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.291483 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn74v\" (UniqueName: \"kubernetes.io/projected/0d8fa69c-f195-4b3b-9f65-dd81969b024b-kube-api-access-dn74v\") pod \"manila-5c72-account-create-update-z9r5d\" (UID: \"0d8fa69c-f195-4b3b-9f65-dd81969b024b\") " pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.291561 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-785n2\" (UniqueName: \"kubernetes.io/projected/29bb829a-15e0-461b-96ce-8f42ec71a111-kube-api-access-785n2\") pod \"manila-db-create-d2zn9\" (UID: \"29bb829a-15e0-461b-96ce-8f42ec71a111\") " pod="manila-kuttl-tests/manila-db-create-d2zn9" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.291662 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29bb829a-15e0-461b-96ce-8f42ec71a111-operator-scripts\") pod \"manila-db-create-d2zn9\" (UID: \"29bb829a-15e0-461b-96ce-8f42ec71a111\") " pod="manila-kuttl-tests/manila-db-create-d2zn9" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.292053 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d8fa69c-f195-4b3b-9f65-dd81969b024b-operator-scripts\") pod \"manila-5c72-account-create-update-z9r5d\" (UID: \"0d8fa69c-f195-4b3b-9f65-dd81969b024b\") " pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 
10:01:53.292964 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29bb829a-15e0-461b-96ce-8f42ec71a111-operator-scripts\") pod \"manila-db-create-d2zn9\" (UID: \"29bb829a-15e0-461b-96ce-8f42ec71a111\") " pod="manila-kuttl-tests/manila-db-create-d2zn9" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.309797 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-785n2\" (UniqueName: \"kubernetes.io/projected/29bb829a-15e0-461b-96ce-8f42ec71a111-kube-api-access-785n2\") pod \"manila-db-create-d2zn9\" (UID: \"29bb829a-15e0-461b-96ce-8f42ec71a111\") " pod="manila-kuttl-tests/manila-db-create-d2zn9" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.370447 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-d2zn9" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.393069 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d8fa69c-f195-4b3b-9f65-dd81969b024b-operator-scripts\") pod \"manila-5c72-account-create-update-z9r5d\" (UID: \"0d8fa69c-f195-4b3b-9f65-dd81969b024b\") " pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.393362 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn74v\" (UniqueName: \"kubernetes.io/projected/0d8fa69c-f195-4b3b-9f65-dd81969b024b-kube-api-access-dn74v\") pod \"manila-5c72-account-create-update-z9r5d\" (UID: \"0d8fa69c-f195-4b3b-9f65-dd81969b024b\") " pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.393994 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d8fa69c-f195-4b3b-9f65-dd81969b024b-operator-scripts\") pod \"manila-5c72-account-create-update-z9r5d\" (UID: \"0d8fa69c-f195-4b3b-9f65-dd81969b024b\") " pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.410295 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn74v\" (UniqueName: \"kubernetes.io/projected/0d8fa69c-f195-4b3b-9f65-dd81969b024b-kube-api-access-dn74v\") pod \"manila-5c72-account-create-update-z9r5d\" (UID: \"0d8fa69c-f195-4b3b-9f65-dd81969b024b\") " pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.466275 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.865573 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-d2zn9"] Dec 04 10:01:53 crc kubenswrapper[4707]: W1204 10:01:53.867459 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29bb829a_15e0_461b_96ce_8f42ec71a111.slice/crio-996ed57d5f9d571fa797bd6c4e6c583dab899c435d5fe1dee32238ac01cc13d5 WatchSource:0}: Error finding container 996ed57d5f9d571fa797bd6c4e6c583dab899c435d5fe1dee32238ac01cc13d5: Status 404 returned error can't find the container with id 996ed57d5f9d571fa797bd6c4e6c583dab899c435d5fe1dee32238ac01cc13d5 Dec 04 10:01:53 crc kubenswrapper[4707]: I1204 10:01:53.928141 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-5c72-account-create-update-z9r5d"] Dec 04 10:01:54 crc kubenswrapper[4707]: I1204 10:01:54.516638 4707 generic.go:334] "Generic (PLEG): container finished" podID="0d8fa69c-f195-4b3b-9f65-dd81969b024b" containerID="a97f7903c46e1e76f61f3c621500d5c28879e1f2d888c3381efdfe5c22d74606" exitCode=0 Dec 04 10:01:54 crc kubenswrapper[4707]: I1204 10:01:54.516785 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" event={"ID":"0d8fa69c-f195-4b3b-9f65-dd81969b024b","Type":"ContainerDied","Data":"a97f7903c46e1e76f61f3c621500d5c28879e1f2d888c3381efdfe5c22d74606"} Dec 04 10:01:54 crc kubenswrapper[4707]: I1204 10:01:54.516820 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" event={"ID":"0d8fa69c-f195-4b3b-9f65-dd81969b024b","Type":"ContainerStarted","Data":"ff9a832a83ac918a48b742987385a17b0a947d5788fbc6fca7acd380c1f8c128"} Dec 04 10:01:54 crc kubenswrapper[4707]: I1204 10:01:54.518718 4707 generic.go:334] "Generic (PLEG): container finished" podID="29bb829a-15e0-461b-96ce-8f42ec71a111" containerID="20f047cf685c38373adeb4fc55ec060340fded713d5daea02ff6d7d9b754af16" exitCode=0 Dec 04 10:01:54 crc kubenswrapper[4707]: I1204 10:01:54.518780 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-d2zn9" event={"ID":"29bb829a-15e0-461b-96ce-8f42ec71a111","Type":"ContainerDied","Data":"20f047cf685c38373adeb4fc55ec060340fded713d5daea02ff6d7d9b754af16"} Dec 04 10:01:54 crc kubenswrapper[4707]: I1204 10:01:54.518810 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-d2zn9" event={"ID":"29bb829a-15e0-461b-96ce-8f42ec71a111","Type":"ContainerStarted","Data":"996ed57d5f9d571fa797bd6c4e6c583dab899c435d5fe1dee32238ac01cc13d5"} Dec 04 10:01:54 crc kubenswrapper[4707]: I1204 10:01:54.852738 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d84b0f2-1df4-42e0-a47d-87f65c9d27c5" path="/var/lib/kubelet/pods/5d84b0f2-1df4-42e0-a47d-87f65c9d27c5/volumes" Dec 04 10:01:54 crc kubenswrapper[4707]: I1204 10:01:54.853670 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f67874a-cd60-42d2-8a2e-e0b5c795d14e" path="/var/lib/kubelet/pods/5f67874a-cd60-42d2-8a2e-e0b5c795d14e/volumes" Dec 04 10:01:54 crc kubenswrapper[4707]: I1204 10:01:54.854197 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="775ebfd2-ec29-4a2a-94cc-d6ed096df7bd" path="/var/lib/kubelet/pods/775ebfd2-ec29-4a2a-94cc-d6ed096df7bd/volumes" Dec 04 
10:01:55 crc kubenswrapper[4707]: I1204 10:01:55.867757 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" Dec 04 10:01:55 crc kubenswrapper[4707]: I1204 10:01:55.874995 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-d2zn9" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.036283 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-785n2\" (UniqueName: \"kubernetes.io/projected/29bb829a-15e0-461b-96ce-8f42ec71a111-kube-api-access-785n2\") pod \"29bb829a-15e0-461b-96ce-8f42ec71a111\" (UID: \"29bb829a-15e0-461b-96ce-8f42ec71a111\") " Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.036858 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d8fa69c-f195-4b3b-9f65-dd81969b024b-operator-scripts\") pod \"0d8fa69c-f195-4b3b-9f65-dd81969b024b\" (UID: \"0d8fa69c-f195-4b3b-9f65-dd81969b024b\") " Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.036889 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29bb829a-15e0-461b-96ce-8f42ec71a111-operator-scripts\") pod \"29bb829a-15e0-461b-96ce-8f42ec71a111\" (UID: \"29bb829a-15e0-461b-96ce-8f42ec71a111\") " Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.036912 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dn74v\" (UniqueName: \"kubernetes.io/projected/0d8fa69c-f195-4b3b-9f65-dd81969b024b-kube-api-access-dn74v\") pod \"0d8fa69c-f195-4b3b-9f65-dd81969b024b\" (UID: \"0d8fa69c-f195-4b3b-9f65-dd81969b024b\") " Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.037416 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d8fa69c-f195-4b3b-9f65-dd81969b024b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0d8fa69c-f195-4b3b-9f65-dd81969b024b" (UID: "0d8fa69c-f195-4b3b-9f65-dd81969b024b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.037562 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29bb829a-15e0-461b-96ce-8f42ec71a111-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "29bb829a-15e0-461b-96ce-8f42ec71a111" (UID: "29bb829a-15e0-461b-96ce-8f42ec71a111"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.042851 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29bb829a-15e0-461b-96ce-8f42ec71a111-kube-api-access-785n2" (OuterVolumeSpecName: "kube-api-access-785n2") pod "29bb829a-15e0-461b-96ce-8f42ec71a111" (UID: "29bb829a-15e0-461b-96ce-8f42ec71a111"). InnerVolumeSpecName "kube-api-access-785n2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.043008 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d8fa69c-f195-4b3b-9f65-dd81969b024b-kube-api-access-dn74v" (OuterVolumeSpecName: "kube-api-access-dn74v") pod "0d8fa69c-f195-4b3b-9f65-dd81969b024b" (UID: "0d8fa69c-f195-4b3b-9f65-dd81969b024b"). InnerVolumeSpecName "kube-api-access-dn74v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.138515 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-785n2\" (UniqueName: \"kubernetes.io/projected/29bb829a-15e0-461b-96ce-8f42ec71a111-kube-api-access-785n2\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.138562 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d8fa69c-f195-4b3b-9f65-dd81969b024b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.138572 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29bb829a-15e0-461b-96ce-8f42ec71a111-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.138581 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dn74v\" (UniqueName: \"kubernetes.io/projected/0d8fa69c-f195-4b3b-9f65-dd81969b024b-kube-api-access-dn74v\") on node \"crc\" DevicePath \"\"" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.534416 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.534445 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-5c72-account-create-update-z9r5d" event={"ID":"0d8fa69c-f195-4b3b-9f65-dd81969b024b","Type":"ContainerDied","Data":"ff9a832a83ac918a48b742987385a17b0a947d5788fbc6fca7acd380c1f8c128"} Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.534481 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff9a832a83ac918a48b742987385a17b0a947d5788fbc6fca7acd380c1f8c128" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.538724 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-d2zn9" event={"ID":"29bb829a-15e0-461b-96ce-8f42ec71a111","Type":"ContainerDied","Data":"996ed57d5f9d571fa797bd6c4e6c583dab899c435d5fe1dee32238ac01cc13d5"} Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.538753 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="996ed57d5f9d571fa797bd6c4e6c583dab899c435d5fe1dee32238ac01cc13d5" Dec 04 10:01:56 crc kubenswrapper[4707]: I1204 10:01:56.538809 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-create-d2zn9" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.472652 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-sync-8kt4d"] Dec 04 10:01:58 crc kubenswrapper[4707]: E1204 10:01:58.473125 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d8fa69c-f195-4b3b-9f65-dd81969b024b" containerName="mariadb-account-create-update" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.473137 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d8fa69c-f195-4b3b-9f65-dd81969b024b" containerName="mariadb-account-create-update" Dec 04 10:01:58 crc kubenswrapper[4707]: E1204 10:01:58.473161 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29bb829a-15e0-461b-96ce-8f42ec71a111" containerName="mariadb-database-create" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.473167 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="29bb829a-15e0-461b-96ce-8f42ec71a111" containerName="mariadb-database-create" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.473294 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d8fa69c-f195-4b3b-9f65-dd81969b024b" containerName="mariadb-account-create-update" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.473305 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="29bb829a-15e0-461b-96ce-8f42ec71a111" containerName="mariadb-database-create" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.473725 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.475453 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.475727 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-zsr2k" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.481669 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-8kt4d"] Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.569801 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-config-data\") pod \"manila-db-sync-8kt4d\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.569858 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9q5d\" (UniqueName: \"kubernetes.io/projected/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-kube-api-access-g9q5d\") pod \"manila-db-sync-8kt4d\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.570015 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-job-config-data\") pod \"manila-db-sync-8kt4d\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.671705 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-job-config-data\") pod \"manila-db-sync-8kt4d\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.671779 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-config-data\") pod \"manila-db-sync-8kt4d\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.671810 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9q5d\" (UniqueName: \"kubernetes.io/projected/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-kube-api-access-g9q5d\") pod \"manila-db-sync-8kt4d\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.676774 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-job-config-data\") pod \"manila-db-sync-8kt4d\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.686322 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-config-data\") pod \"manila-db-sync-8kt4d\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.689743 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9q5d\" (UniqueName: \"kubernetes.io/projected/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-kube-api-access-g9q5d\") pod \"manila-db-sync-8kt4d\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:01:58 crc kubenswrapper[4707]: I1204 10:01:58.788397 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:01:59 crc kubenswrapper[4707]: I1204 10:01:59.211117 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-8kt4d"] Dec 04 10:01:59 crc kubenswrapper[4707]: I1204 10:01:59.560824 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-8kt4d" event={"ID":"b01e4ed0-19ec-40e3-86fc-7cb2784515ad","Type":"ContainerStarted","Data":"64437ccf773361794da58f159508235121e29babaf40ef7b2e36b5da3630e44e"} Dec 04 10:01:59 crc kubenswrapper[4707]: I1204 10:01:59.560888 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-8kt4d" event={"ID":"b01e4ed0-19ec-40e3-86fc-7cb2784515ad","Type":"ContainerStarted","Data":"920f2b58712f7bcd26cae4ebf0e852c07df6a585025984d7f839229937edc945"} Dec 04 10:01:59 crc kubenswrapper[4707]: I1204 10:01:59.578047 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-db-sync-8kt4d" podStartSLOduration=1.578027896 podStartE2EDuration="1.578027896s" podCreationTimestamp="2025-12-04 10:01:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:01:59.576813159 +0000 UTC m=+1419.012635686" watchObservedRunningTime="2025-12-04 10:01:59.578027896 +0000 UTC m=+1419.013850403" Dec 04 10:02:00 crc kubenswrapper[4707]: I1204 10:02:00.817830 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 10:02:00 crc kubenswrapper[4707]: I1204 10:02:00.818271 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 10:02:01 crc kubenswrapper[4707]: I1204 10:02:01.575806 4707 generic.go:334] "Generic (PLEG): container finished" podID="b01e4ed0-19ec-40e3-86fc-7cb2784515ad" containerID="64437ccf773361794da58f159508235121e29babaf40ef7b2e36b5da3630e44e" exitCode=0 Dec 04 10:02:01 crc kubenswrapper[4707]: I1204 10:02:01.575853 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-8kt4d" event={"ID":"b01e4ed0-19ec-40e3-86fc-7cb2784515ad","Type":"ContainerDied","Data":"64437ccf773361794da58f159508235121e29babaf40ef7b2e36b5da3630e44e"} Dec 04 10:02:02 crc kubenswrapper[4707]: I1204 10:02:02.852465 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.036326 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9q5d\" (UniqueName: \"kubernetes.io/projected/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-kube-api-access-g9q5d\") pod \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.036480 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-job-config-data\") pod \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.036574 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-config-data\") pod \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\" (UID: \"b01e4ed0-19ec-40e3-86fc-7cb2784515ad\") " Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.042632 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-kube-api-access-g9q5d" (OuterVolumeSpecName: "kube-api-access-g9q5d") pod "b01e4ed0-19ec-40e3-86fc-7cb2784515ad" (UID: "b01e4ed0-19ec-40e3-86fc-7cb2784515ad"). InnerVolumeSpecName "kube-api-access-g9q5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.043358 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "b01e4ed0-19ec-40e3-86fc-7cb2784515ad" (UID: "b01e4ed0-19ec-40e3-86fc-7cb2784515ad"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.047276 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-config-data" (OuterVolumeSpecName: "config-data") pod "b01e4ed0-19ec-40e3-86fc-7cb2784515ad" (UID: "b01e4ed0-19ec-40e3-86fc-7cb2784515ad"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.140273 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.140320 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9q5d\" (UniqueName: \"kubernetes.io/projected/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-kube-api-access-g9q5d\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.140353 4707 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b01e4ed0-19ec-40e3-86fc-7cb2784515ad-job-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.591564 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-8kt4d" event={"ID":"b01e4ed0-19ec-40e3-86fc-7cb2784515ad","Type":"ContainerDied","Data":"920f2b58712f7bcd26cae4ebf0e852c07df6a585025984d7f839229937edc945"} Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.591622 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="920f2b58712f7bcd26cae4ebf0e852c07df6a585025984d7f839229937edc945" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.591641 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-8kt4d" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.879862 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:02:03 crc kubenswrapper[4707]: E1204 10:02:03.880187 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b01e4ed0-19ec-40e3-86fc-7cb2784515ad" containerName="manila-db-sync" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.880202 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="b01e4ed0-19ec-40e3-86fc-7cb2784515ad" containerName="manila-db-sync" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.880332 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="b01e4ed0-19ec-40e3-86fc-7cb2784515ad" containerName="manila-db-sync" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.881187 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.888832 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scripts" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.889159 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.889382 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scheduler-config-data" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.890093 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-zsr2k" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.905840 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.986306 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.987528 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.993392 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.994635 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:03 crc kubenswrapper[4707]: I1204 10:02:03.995212 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-api-config-data" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.000313 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"ceph-conf-files" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.000585 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-share-share0-config-data" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.007145 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.012965 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.054238 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.054302 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-827pz\" (UniqueName: \"kubernetes.io/projected/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-kube-api-access-827pz\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.054465 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data-custom\") pod 
\"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.054523 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.054585 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-scripts\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.156005 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.156430 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bc01c02-a12a-45f0-9187-f358515e39c8-logs\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.156530 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.156637 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-scripts\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.156744 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvv2b\" (UniqueName: \"kubernetes.io/projected/4bc01c02-a12a-45f0-9187-f358515e39c8-kube-api-access-lvv2b\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.156849 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-827pz\" (UniqueName: \"kubernetes.io/projected/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-kube-api-access-827pz\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.156943 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-ceph\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " 
pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.157019 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.157108 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data-custom\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.157258 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.157693 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.157794 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.157841 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.157966 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-scripts\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.158072 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.158159 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4bc01c02-a12a-45f0-9187-f358515e39c8-etc-machine-id\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc 
kubenswrapper[4707]: I1204 10:02:04.158246 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-scripts\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.158386 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.158530 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gnhw\" (UniqueName: \"kubernetes.io/projected/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-kube-api-access-7gnhw\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.161195 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.161461 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.162109 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-scripts\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.175782 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-827pz\" (UniqueName: \"kubernetes.io/projected/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-kube-api-access-827pz\") pod \"manila-scheduler-0\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.201623 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.259554 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.259720 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gnhw\" (UniqueName: \"kubernetes.io/projected/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-kube-api-access-7gnhw\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.259639 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.260151 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bc01c02-a12a-45f0-9187-f358515e39c8-logs\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.260559 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bc01c02-a12a-45f0-9187-f358515e39c8-logs\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.260624 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.261346 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvv2b\" (UniqueName: \"kubernetes.io/projected/4bc01c02-a12a-45f0-9187-f358515e39c8-kube-api-access-lvv2b\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.261385 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-scripts\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.261426 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-ceph\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.261440 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.261495 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data-custom\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.261540 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.261590 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-scripts\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.261609 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.261624 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4bc01c02-a12a-45f0-9187-f358515e39c8-etc-machine-id\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.261702 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4bc01c02-a12a-45f0-9187-f358515e39c8-etc-machine-id\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.261884 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.264655 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-ceph\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.266086 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-scripts\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.266695 
4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.274863 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.275536 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.278017 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data-custom\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.278368 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-scripts\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.278610 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gnhw\" (UniqueName: \"kubernetes.io/projected/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-kube-api-access-7gnhw\") pod \"manila-share-share0-0\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.279024 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvv2b\" (UniqueName: \"kubernetes.io/projected/4bc01c02-a12a-45f0-9187-f358515e39c8-kube-api-access-lvv2b\") pod \"manila-api-0\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.307982 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:04 crc kubenswrapper[4707]: I1204 10:02:04.324413 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:05 crc kubenswrapper[4707]: I1204 10:02:04.468171 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:02:05 crc kubenswrapper[4707]: I1204 10:02:04.598285 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29","Type":"ContainerStarted","Data":"a48a005290ca9c2e49aa98024e8b8b6032b5ae844d4615804bec3db57318c88b"} Dec 04 10:02:05 crc kubenswrapper[4707]: I1204 10:02:05.610307 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29","Type":"ContainerStarted","Data":"79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73"} Dec 04 10:02:05 crc kubenswrapper[4707]: I1204 10:02:05.610766 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29","Type":"ContainerStarted","Data":"295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012"} Dec 04 10:02:05 crc kubenswrapper[4707]: I1204 10:02:05.637066 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-scheduler-0" podStartSLOduration=2.637044723 podStartE2EDuration="2.637044723s" podCreationTimestamp="2025-12-04 10:02:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:02:05.636461905 +0000 UTC m=+1425.072284432" watchObservedRunningTime="2025-12-04 10:02:05.637044723 +0000 UTC m=+1425.072867230" Dec 04 10:02:06 crc kubenswrapper[4707]: I1204 10:02:06.027528 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:02:06 crc kubenswrapper[4707]: I1204 10:02:06.033861 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:02:06 crc kubenswrapper[4707]: I1204 10:02:06.629588 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998","Type":"ContainerStarted","Data":"e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5"} Dec 04 10:02:06 crc kubenswrapper[4707]: I1204 10:02:06.630300 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998","Type":"ContainerStarted","Data":"2577dd4b27f4328d336ddeac457f9919c80113d2516e18012bc29b474ee94686"} Dec 04 10:02:06 crc kubenswrapper[4707]: I1204 10:02:06.631183 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4bc01c02-a12a-45f0-9187-f358515e39c8","Type":"ContainerStarted","Data":"55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412"} Dec 04 10:02:06 crc kubenswrapper[4707]: I1204 10:02:06.631241 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4bc01c02-a12a-45f0-9187-f358515e39c8","Type":"ContainerStarted","Data":"ad2cfff69fd8e1e9a0f29333bfab5e3441776b489d97824aeb15cb4fc7fc5492"} Dec 04 10:02:07 crc kubenswrapper[4707]: I1204 10:02:07.640969 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" 
event={"ID":"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998","Type":"ContainerStarted","Data":"b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669"} Dec 04 10:02:07 crc kubenswrapper[4707]: I1204 10:02:07.642994 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4bc01c02-a12a-45f0-9187-f358515e39c8","Type":"ContainerStarted","Data":"08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a"} Dec 04 10:02:07 crc kubenswrapper[4707]: I1204 10:02:07.643265 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:07 crc kubenswrapper[4707]: I1204 10:02:07.664783 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-share-share0-0" podStartSLOduration=4.664763241 podStartE2EDuration="4.664763241s" podCreationTimestamp="2025-12-04 10:02:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:02:07.659779203 +0000 UTC m=+1427.095601710" watchObservedRunningTime="2025-12-04 10:02:07.664763241 +0000 UTC m=+1427.100585748" Dec 04 10:02:07 crc kubenswrapper[4707]: I1204 10:02:07.681012 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-api-0" podStartSLOduration=4.680991445 podStartE2EDuration="4.680991445s" podCreationTimestamp="2025-12-04 10:02:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:02:07.67770075 +0000 UTC m=+1427.113523267" watchObservedRunningTime="2025-12-04 10:02:07.680991445 +0000 UTC m=+1427.116813952" Dec 04 10:02:14 crc kubenswrapper[4707]: I1204 10:02:14.202760 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:14 crc kubenswrapper[4707]: I1204 10:02:14.325488 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:25 crc kubenswrapper[4707]: I1204 10:02:25.727063 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:25 crc kubenswrapper[4707]: I1204 10:02:25.910256 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:26 crc kubenswrapper[4707]: I1204 10:02:26.057185 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.335881 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"] Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.337533 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.339866 4707 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-share-share1-config-data" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.353754 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"] Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.437437 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.437694 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.437866 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-ceph\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.437969 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.438086 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-scripts\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.438185 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcrhl\" (UniqueName: \"kubernetes.io/projected/d70cf714-12f6-448e-94f4-6b276f8de691-kube-api-access-jcrhl\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.438281 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.539894 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-ceph\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " 
pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.540266 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.540390 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-scripts\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.540490 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcrhl\" (UniqueName: \"kubernetes.io/projected/d70cf714-12f6-448e-94f4-6b276f8de691-kube-api-access-jcrhl\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.540591 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.540387 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.540720 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.540878 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.540872 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.546541 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-scripts\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.546689 4707 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.546704 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-ceph\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.547436 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.562729 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcrhl\" (UniqueName: \"kubernetes.io/projected/d70cf714-12f6-448e-94f4-6b276f8de691-kube-api-access-jcrhl\") pod \"manila-share-share1-0\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:28 crc kubenswrapper[4707]: I1204 10:02:28.683036 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:29 crc kubenswrapper[4707]: I1204 10:02:29.093551 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"] Dec 04 10:02:29 crc kubenswrapper[4707]: W1204 10:02:29.094513 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd70cf714_12f6_448e_94f4_6b276f8de691.slice/crio-fdae7e8eb0ec6481c4d728354293461d3149a9706d2c6e60bd0eca0fdb687da0 WatchSource:0}: Error finding container fdae7e8eb0ec6481c4d728354293461d3149a9706d2c6e60bd0eca0fdb687da0: Status 404 returned error can't find the container with id fdae7e8eb0ec6481c4d728354293461d3149a9706d2c6e60bd0eca0fdb687da0 Dec 04 10:02:29 crc kubenswrapper[4707]: I1204 10:02:29.838600 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" event={"ID":"d70cf714-12f6-448e-94f4-6b276f8de691","Type":"ContainerStarted","Data":"d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024"} Dec 04 10:02:29 crc kubenswrapper[4707]: I1204 10:02:29.839043 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" event={"ID":"d70cf714-12f6-448e-94f4-6b276f8de691","Type":"ContainerStarted","Data":"de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6"} Dec 04 10:02:29 crc kubenswrapper[4707]: I1204 10:02:29.839060 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" event={"ID":"d70cf714-12f6-448e-94f4-6b276f8de691","Type":"ContainerStarted","Data":"fdae7e8eb0ec6481c4d728354293461d3149a9706d2c6e60bd0eca0fdb687da0"} Dec 04 10:02:29 crc kubenswrapper[4707]: I1204 10:02:29.862608 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-share-share1-0" podStartSLOduration=1.862592477 podStartE2EDuration="1.862592477s" podCreationTimestamp="2025-12-04 10:02:28 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:02:29.859012763 +0000 UTC m=+1449.294835270" watchObservedRunningTime="2025-12-04 10:02:29.862592477 +0000 UTC m=+1449.298414984" Dec 04 10:02:30 crc kubenswrapper[4707]: I1204 10:02:30.817728 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 10:02:30 crc kubenswrapper[4707]: I1204 10:02:30.817972 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 10:02:38 crc kubenswrapper[4707]: I1204 10:02:38.683771 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:50 crc kubenswrapper[4707]: I1204 10:02:50.163873 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:51 crc kubenswrapper[4707]: I1204 10:02:51.203814 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:02:51 crc kubenswrapper[4707]: I1204 10:02:51.205471 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" containerName="manila-share" containerID="cri-o://e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5" gracePeriod=30 Dec 04 10:02:51 crc kubenswrapper[4707]: I1204 10:02:51.206028 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" containerName="probe" containerID="cri-o://b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669" gracePeriod=30 Dec 04 10:02:51 crc kubenswrapper[4707]: I1204 10:02:51.898748 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.033239 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data-custom\") pod \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.033321 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data\") pod \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.033404 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gnhw\" (UniqueName: \"kubernetes.io/projected/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-kube-api-access-7gnhw\") pod \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.033486 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-var-lib-manila\") pod \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.033513 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-scripts\") pod \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.033583 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-ceph\") pod \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.033603 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-etc-machine-id\") pod \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\" (UID: \"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998\") " Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.033660 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" (UID: "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.033806 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" (UID: "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.034634 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.034659 4707 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-var-lib-manila\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.038266 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-scripts" (OuterVolumeSpecName: "scripts") pod "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" (UID: "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.038685 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" (UID: "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.038929 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-ceph" (OuterVolumeSpecName: "ceph") pod "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" (UID: "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.040018 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-kube-api-access-7gnhw" (OuterVolumeSpecName: "kube-api-access-7gnhw") pod "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" (UID: "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998"). InnerVolumeSpecName "kube-api-access-7gnhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.098490 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data" (OuterVolumeSpecName: "config-data") pod "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" (UID: "6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.136396 4707 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-ceph\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.136441 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.136461 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gnhw\" (UniqueName: \"kubernetes.io/projected/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-kube-api-access-7gnhw\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.136477 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.136490 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.152221 4707 generic.go:334] "Generic (PLEG): container finished" podID="6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" containerID="b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669" exitCode=0 Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.152251 4707 generic.go:334] "Generic (PLEG): container finished" podID="6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" containerID="e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5" exitCode=1 Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.152269 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998","Type":"ContainerDied","Data":"b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669"} Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.152285 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.152356 4707 scope.go:117] "RemoveContainer" containerID="b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.152328 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998","Type":"ContainerDied","Data":"e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5"} Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.152477 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998","Type":"ContainerDied","Data":"2577dd4b27f4328d336ddeac457f9919c80113d2516e18012bc29b474ee94686"} Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.172218 4707 scope.go:117] "RemoveContainer" containerID="e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.179272 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.184604 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.194812 4707 scope.go:117] "RemoveContainer" containerID="b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669" Dec 04 10:02:52 crc kubenswrapper[4707]: E1204 10:02:52.195542 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669\": container with ID starting with b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669 not found: ID does not exist" containerID="b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.195608 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669"} err="failed to get container status \"b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669\": rpc error: code = NotFound desc = could not find container \"b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669\": container with ID starting with b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669 not found: ID does not exist" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.195645 4707 scope.go:117] "RemoveContainer" containerID="e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5" Dec 04 10:02:52 crc kubenswrapper[4707]: E1204 10:02:52.196092 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5\": container with ID starting with e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5 not found: ID does not exist" containerID="e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.196153 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5"} err="failed to get container status 
\"e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5\": rpc error: code = NotFound desc = could not find container \"e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5\": container with ID starting with e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5 not found: ID does not exist" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.196191 4707 scope.go:117] "RemoveContainer" containerID="b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.196551 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669"} err="failed to get container status \"b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669\": rpc error: code = NotFound desc = could not find container \"b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669\": container with ID starting with b6e49c9ae15bd257e62b01e6729d66981c07aa8aa1efbb36d7d0e939c45c1669 not found: ID does not exist" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.196572 4707 scope.go:117] "RemoveContainer" containerID="e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.196850 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5"} err="failed to get container status \"e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5\": rpc error: code = NotFound desc = could not find container \"e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5\": container with ID starting with e0a13b189ee075d259288f2c6343d7bfb1b83fde8dd895e986fd9403e70699e5 not found: ID does not exist" Dec 04 10:02:52 crc kubenswrapper[4707]: I1204 10:02:52.854470 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" path="/var/lib/kubelet/pods/6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998/volumes" Dec 04 10:02:53 crc kubenswrapper[4707]: I1204 10:02:53.797233 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx"] Dec 04 10:02:53 crc kubenswrapper[4707]: E1204 10:02:53.797921 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" containerName="manila-share" Dec 04 10:02:53 crc kubenswrapper[4707]: I1204 10:02:53.797943 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" containerName="manila-share" Dec 04 10:02:53 crc kubenswrapper[4707]: E1204 10:02:53.797953 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" containerName="probe" Dec 04 10:02:53 crc kubenswrapper[4707]: I1204 10:02:53.797960 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" containerName="probe" Dec 04 10:02:53 crc kubenswrapper[4707]: I1204 10:02:53.798099 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" containerName="probe" Dec 04 10:02:53 crc kubenswrapper[4707]: I1204 10:02:53.798133 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ea2b5cc-c6cd-4352-9720-8c4d3dcb5998" containerName="manila-share" Dec 04 10:02:53 crc kubenswrapper[4707]: I1204 10:02:53.798686 4707 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:02:53 crc kubenswrapper[4707]: I1204 10:02:53.809938 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx"] Dec 04 10:02:53 crc kubenswrapper[4707]: I1204 10:02:53.961672 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-job-config-data\") pod \"manila-service-cleanup-n5b5h655-lpdjx\" (UID: \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:02:53 crc kubenswrapper[4707]: I1204 10:02:53.961764 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m85t\" (UniqueName: \"kubernetes.io/projected/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-kube-api-access-9m85t\") pod \"manila-service-cleanup-n5b5h655-lpdjx\" (UID: \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:02:53 crc kubenswrapper[4707]: I1204 10:02:53.961864 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-config-data\") pod \"manila-service-cleanup-n5b5h655-lpdjx\" (UID: \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.063367 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-job-config-data\") pod \"manila-service-cleanup-n5b5h655-lpdjx\" (UID: \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.063495 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m85t\" (UniqueName: \"kubernetes.io/projected/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-kube-api-access-9m85t\") pod \"manila-service-cleanup-n5b5h655-lpdjx\" (UID: \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.063557 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-config-data\") pod \"manila-service-cleanup-n5b5h655-lpdjx\" (UID: \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.068755 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-config-data\") pod \"manila-service-cleanup-n5b5h655-lpdjx\" (UID: \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.073923 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-job-config-data\") pod \"manila-service-cleanup-n5b5h655-lpdjx\" (UID: 
\"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.089999 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m85t\" (UniqueName: \"kubernetes.io/projected/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-kube-api-access-9m85t\") pod \"manila-service-cleanup-n5b5h655-lpdjx\" (UID: \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.160783 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.348635 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx"] Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.535690 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx"] Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.549401 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-sync-8kt4d"] Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.555876 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-sync-8kt4d"] Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.573895 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.574123 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" containerName="manila-scheduler" containerID="cri-o://295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012" gracePeriod=30 Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.574508 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" containerName="probe" containerID="cri-o://79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73" gracePeriod=30 Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.581006 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"] Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.581239 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share1-0" podUID="d70cf714-12f6-448e-94f4-6b276f8de691" containerName="manila-share" containerID="cri-o://de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6" gracePeriod=30 Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.581590 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share1-0" podUID="d70cf714-12f6-448e-94f4-6b276f8de691" containerName="probe" containerID="cri-o://d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024" gracePeriod=30 Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.650641 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila5c72-account-delete-slvg4"] Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.652421 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.666729 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.667141 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="4bc01c02-a12a-45f0-9187-f358515e39c8" containerName="manila-api-log" containerID="cri-o://55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412" gracePeriod=30 Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.667350 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="4bc01c02-a12a-45f0-9187-f358515e39c8" containerName="manila-api" containerID="cri-o://08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a" gracePeriod=30 Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.674242 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila5c72-account-delete-slvg4"] Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.784301 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf8gv\" (UniqueName: \"kubernetes.io/projected/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-kube-api-access-cf8gv\") pod \"manila5c72-account-delete-slvg4\" (UID: \"ba1dbef8-a400-43ad-b24d-45e13ad4eff4\") " pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.784736 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-operator-scripts\") pod \"manila5c72-account-delete-slvg4\" (UID: \"ba1dbef8-a400-43ad-b24d-45e13ad4eff4\") " pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.853805 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b01e4ed0-19ec-40e3-86fc-7cb2784515ad" path="/var/lib/kubelet/pods/b01e4ed0-19ec-40e3-86fc-7cb2784515ad/volumes" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.886694 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-operator-scripts\") pod \"manila5c72-account-delete-slvg4\" (UID: \"ba1dbef8-a400-43ad-b24d-45e13ad4eff4\") " pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.886824 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf8gv\" (UniqueName: \"kubernetes.io/projected/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-kube-api-access-cf8gv\") pod \"manila5c72-account-delete-slvg4\" (UID: \"ba1dbef8-a400-43ad-b24d-45e13ad4eff4\") " pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.887883 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-operator-scripts\") pod \"manila5c72-account-delete-slvg4\" (UID: \"ba1dbef8-a400-43ad-b24d-45e13ad4eff4\") " pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.909099 4707 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf8gv\" (UniqueName: \"kubernetes.io/projected/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-kube-api-access-cf8gv\") pod \"manila5c72-account-delete-slvg4\" (UID: \"ba1dbef8-a400-43ad-b24d-45e13ad4eff4\") " pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" Dec 04 10:02:54 crc kubenswrapper[4707]: I1204 10:02:54.995364 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.185075 4707 generic.go:334] "Generic (PLEG): container finished" podID="d70cf714-12f6-448e-94f4-6b276f8de691" containerID="d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024" exitCode=0 Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.185168 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" event={"ID":"d70cf714-12f6-448e-94f4-6b276f8de691","Type":"ContainerDied","Data":"d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024"} Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.190052 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" event={"ID":"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce","Type":"ContainerStarted","Data":"7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd"} Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.190114 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" event={"ID":"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce","Type":"ContainerStarted","Data":"ebf2c452d49636e565706dea8acfa3240a0edc8a4caeb559af3a7ff6b234a51a"} Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.190175 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" podUID="f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce" containerName="manila-service-cleanup-n5b5h655" containerID="cri-o://7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd" gracePeriod=30 Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.192819 4707 generic.go:334] "Generic (PLEG): container finished" podID="dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" containerID="79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73" exitCode=0 Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.192894 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29","Type":"ContainerDied","Data":"79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73"} Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.195725 4707 generic.go:334] "Generic (PLEG): container finished" podID="4bc01c02-a12a-45f0-9187-f358515e39c8" containerID="55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412" exitCode=143 Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.195779 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4bc01c02-a12a-45f0-9187-f358515e39c8","Type":"ContainerDied","Data":"55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412"} Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.216419 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" podStartSLOduration=2.216393404 
podStartE2EDuration="2.216393404s" podCreationTimestamp="2025-12-04 10:02:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:02:55.215713053 +0000 UTC m=+1474.651535560" watchObservedRunningTime="2025-12-04 10:02:55.216393404 +0000 UTC m=+1474.652215911" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.482900 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila5c72-account-delete-slvg4"] Dec 04 10:02:55 crc kubenswrapper[4707]: W1204 10:02:55.500885 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba1dbef8_a400_43ad_b24d_45e13ad4eff4.slice/crio-a7e7da1658ced9172865cc7aacfc00e164d1d7244d02d955f28f2684cb3af3d0 WatchSource:0}: Error finding container a7e7da1658ced9172865cc7aacfc00e164d1d7244d02d955f28f2684cb3af3d0: Status 404 returned error can't find the container with id a7e7da1658ced9172865cc7aacfc00e164d1d7244d02d955f28f2684cb3af3d0 Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.562109 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.577635 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb"] Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.577842 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" podUID="946cc19a-22ff-4a9b-862f-f9471d794bb0" containerName="manager" containerID="cri-o://dea4af4e97d8032f50bc97a7c4eaa03a420e6f12c96395fc960168eb6b20a072" gracePeriod=10 Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.698359 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data\") pod \"d70cf714-12f6-448e-94f4-6b276f8de691\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.698425 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-var-lib-manila\") pod \"d70cf714-12f6-448e-94f4-6b276f8de691\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.698529 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-ceph\") pod \"d70cf714-12f6-448e-94f4-6b276f8de691\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.698584 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-etc-machine-id\") pod \"d70cf714-12f6-448e-94f4-6b276f8de691\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.698607 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-scripts\") pod \"d70cf714-12f6-448e-94f4-6b276f8de691\" (UID: 
\"d70cf714-12f6-448e-94f4-6b276f8de691\") " Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.698639 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data-custom\") pod \"d70cf714-12f6-448e-94f4-6b276f8de691\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.698669 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcrhl\" (UniqueName: \"kubernetes.io/projected/d70cf714-12f6-448e-94f4-6b276f8de691-kube-api-access-jcrhl\") pod \"d70cf714-12f6-448e-94f4-6b276f8de691\" (UID: \"d70cf714-12f6-448e-94f4-6b276f8de691\") " Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.700038 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d70cf714-12f6-448e-94f4-6b276f8de691" (UID: "d70cf714-12f6-448e-94f4-6b276f8de691"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.700352 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "d70cf714-12f6-448e-94f4-6b276f8de691" (UID: "d70cf714-12f6-448e-94f4-6b276f8de691"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.727555 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-scripts" (OuterVolumeSpecName: "scripts") pod "d70cf714-12f6-448e-94f4-6b276f8de691" (UID: "d70cf714-12f6-448e-94f4-6b276f8de691"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.727663 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-ceph" (OuterVolumeSpecName: "ceph") pod "d70cf714-12f6-448e-94f4-6b276f8de691" (UID: "d70cf714-12f6-448e-94f4-6b276f8de691"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.738482 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d70cf714-12f6-448e-94f4-6b276f8de691" (UID: "d70cf714-12f6-448e-94f4-6b276f8de691"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.754258 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d70cf714-12f6-448e-94f4-6b276f8de691-kube-api-access-jcrhl" (OuterVolumeSpecName: "kube-api-access-jcrhl") pod "d70cf714-12f6-448e-94f4-6b276f8de691" (UID: "d70cf714-12f6-448e-94f4-6b276f8de691"). InnerVolumeSpecName "kube-api-access-jcrhl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.802159 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcrhl\" (UniqueName: \"kubernetes.io/projected/d70cf714-12f6-448e-94f4-6b276f8de691-kube-api-access-jcrhl\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.802195 4707 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-var-lib-manila\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.802205 4707 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-ceph\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.802214 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d70cf714-12f6-448e-94f4-6b276f8de691-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.802222 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.802230 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.832106 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data" (OuterVolumeSpecName: "config-data") pod "d70cf714-12f6-448e-94f4-6b276f8de691" (UID: "d70cf714-12f6-448e-94f4-6b276f8de691"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.854401 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-index-hlvgt"] Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.854616 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/manila-operator-index-hlvgt" podUID="0e6e6fc4-562a-4ca6-af63-40f7a45400cc" containerName="registry-server" containerID="cri-o://7032c18c1b048acf87017a94ece2a3160acaa8d1768de1c0935f19bb7975c949" gracePeriod=30 Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.905127 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d70cf714-12f6-448e-94f4-6b276f8de691-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.913899 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr"] Dec 04 10:02:55 crc kubenswrapper[4707]: I1204 10:02:55.930498 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/5b47e7dc595e86cb4913e987c5131ebb429963ecaaa2c9d2c9e76025bfdktvr"] Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.216290 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" event={"ID":"ba1dbef8-a400-43ad-b24d-45e13ad4eff4","Type":"ContainerStarted","Data":"a4d03d85ae7d1a62b7d71861cd080c56ffe7eb4fe75ebb078c52e903ebe0b1a6"} Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.216718 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" event={"ID":"ba1dbef8-a400-43ad-b24d-45e13ad4eff4","Type":"ContainerStarted","Data":"a7e7da1658ced9172865cc7aacfc00e164d1d7244d02d955f28f2684cb3af3d0"} Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.227432 4707 generic.go:334] "Generic (PLEG): container finished" podID="946cc19a-22ff-4a9b-862f-f9471d794bb0" containerID="dea4af4e97d8032f50bc97a7c4eaa03a420e6f12c96395fc960168eb6b20a072" exitCode=0 Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.227565 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" event={"ID":"946cc19a-22ff-4a9b-862f-f9471d794bb0","Type":"ContainerDied","Data":"dea4af4e97d8032f50bc97a7c4eaa03a420e6f12c96395fc960168eb6b20a072"} Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.241711 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" podStartSLOduration=2.241691802 podStartE2EDuration="2.241691802s" podCreationTimestamp="2025-12-04 10:02:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:02:56.237249222 +0000 UTC m=+1475.673071749" watchObservedRunningTime="2025-12-04 10:02:56.241691802 +0000 UTC m=+1475.677514309" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.242109 4707 generic.go:334] "Generic (PLEG): container finished" podID="d70cf714-12f6-448e-94f4-6b276f8de691" containerID="de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6" exitCode=1 Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.242169 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" 
event={"ID":"d70cf714-12f6-448e-94f4-6b276f8de691","Type":"ContainerDied","Data":"de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6"} Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.242196 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" event={"ID":"d70cf714-12f6-448e-94f4-6b276f8de691","Type":"ContainerDied","Data":"fdae7e8eb0ec6481c4d728354293461d3149a9706d2c6e60bd0eca0fdb687da0"} Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.242214 4707 scope.go:117] "RemoveContainer" containerID="d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.242326 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share1-0" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.252108 4707 generic.go:334] "Generic (PLEG): container finished" podID="0e6e6fc4-562a-4ca6-af63-40f7a45400cc" containerID="7032c18c1b048acf87017a94ece2a3160acaa8d1768de1c0935f19bb7975c949" exitCode=0 Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.252192 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-hlvgt" event={"ID":"0e6e6fc4-562a-4ca6-af63-40f7a45400cc","Type":"ContainerDied","Data":"7032c18c1b048acf87017a94ece2a3160acaa8d1768de1c0935f19bb7975c949"} Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.327052 4707 scope.go:117] "RemoveContainer" containerID="de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.356226 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"] Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.363855 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-index-hlvgt" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.377976 4707 scope.go:117] "RemoveContainer" containerID="d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.378386 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"] Dec 04 10:02:56 crc kubenswrapper[4707]: E1204 10:02:56.378491 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024\": container with ID starting with d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024 not found: ID does not exist" containerID="d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.378529 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024"} err="failed to get container status \"d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024\": rpc error: code = NotFound desc = could not find container \"d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024\": container with ID starting with d6029416efadf2c5692f2cd03e224597476490928fa25df6c1d9001e3a44d024 not found: ID does not exist" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.378557 4707 scope.go:117] "RemoveContainer" containerID="de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6" Dec 04 10:02:56 crc kubenswrapper[4707]: E1204 10:02:56.379638 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6\": container with ID starting with de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6 not found: ID does not exist" containerID="de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.379660 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6"} err="failed to get container status \"de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6\": rpc error: code = NotFound desc = could not find container \"de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6\": container with ID starting with de99fd634368952611ede0e4ce6fd09c0d28e14bb39e0f8cd3ebd88e109c0ad6 not found: ID does not exist" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.517938 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghg57\" (UniqueName: \"kubernetes.io/projected/0e6e6fc4-562a-4ca6-af63-40f7a45400cc-kube-api-access-ghg57\") pod \"0e6e6fc4-562a-4ca6-af63-40f7a45400cc\" (UID: \"0e6e6fc4-562a-4ca6-af63-40f7a45400cc\") " Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.523447 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e6e6fc4-562a-4ca6-af63-40f7a45400cc-kube-api-access-ghg57" (OuterVolumeSpecName: "kube-api-access-ghg57") pod "0e6e6fc4-562a-4ca6-af63-40f7a45400cc" (UID: "0e6e6fc4-562a-4ca6-af63-40f7a45400cc"). InnerVolumeSpecName "kube-api-access-ghg57". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.567764 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.620598 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghg57\" (UniqueName: \"kubernetes.io/projected/0e6e6fc4-562a-4ca6-af63-40f7a45400cc-kube-api-access-ghg57\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.721944 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-apiservice-cert\") pod \"946cc19a-22ff-4a9b-862f-f9471d794bb0\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.722048 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-webhook-cert\") pod \"946cc19a-22ff-4a9b-862f-f9471d794bb0\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.722081 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzq4j\" (UniqueName: \"kubernetes.io/projected/946cc19a-22ff-4a9b-862f-f9471d794bb0-kube-api-access-pzq4j\") pod \"946cc19a-22ff-4a9b-862f-f9471d794bb0\" (UID: \"946cc19a-22ff-4a9b-862f-f9471d794bb0\") " Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.724990 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/946cc19a-22ff-4a9b-862f-f9471d794bb0-kube-api-access-pzq4j" (OuterVolumeSpecName: "kube-api-access-pzq4j") pod "946cc19a-22ff-4a9b-862f-f9471d794bb0" (UID: "946cc19a-22ff-4a9b-862f-f9471d794bb0"). InnerVolumeSpecName "kube-api-access-pzq4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.726531 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "946cc19a-22ff-4a9b-862f-f9471d794bb0" (UID: "946cc19a-22ff-4a9b-862f-f9471d794bb0"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.726565 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "946cc19a-22ff-4a9b-862f-f9471d794bb0" (UID: "946cc19a-22ff-4a9b-862f-f9471d794bb0"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.824026 4707 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.824072 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzq4j\" (UniqueName: \"kubernetes.io/projected/946cc19a-22ff-4a9b-862f-f9471d794bb0-kube-api-access-pzq4j\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.824085 4707 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/946cc19a-22ff-4a9b-862f-f9471d794bb0-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.854460 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f8839cc-9d4a-4cba-ab65-b5413391e72f" path="/var/lib/kubelet/pods/4f8839cc-9d4a-4cba-ab65-b5413391e72f/volumes" Dec 04 10:02:56 crc kubenswrapper[4707]: I1204 10:02:56.855374 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d70cf714-12f6-448e-94f4-6b276f8de691" path="/var/lib/kubelet/pods/d70cf714-12f6-448e-94f4-6b276f8de691/volumes" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.229950 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.260743 4707 generic.go:334] "Generic (PLEG): container finished" podID="ba1dbef8-a400-43ad-b24d-45e13ad4eff4" containerID="a4d03d85ae7d1a62b7d71861cd080c56ffe7eb4fe75ebb078c52e903ebe0b1a6" exitCode=0 Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.260805 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" event={"ID":"ba1dbef8-a400-43ad-b24d-45e13ad4eff4","Type":"ContainerDied","Data":"a4d03d85ae7d1a62b7d71861cd080c56ffe7eb4fe75ebb078c52e903ebe0b1a6"} Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.262613 4707 generic.go:334] "Generic (PLEG): container finished" podID="dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" containerID="295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012" exitCode=0 Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.262676 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29","Type":"ContainerDied","Data":"295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012"} Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.262702 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29","Type":"ContainerDied","Data":"a48a005290ca9c2e49aa98024e8b8b6032b5ae844d4615804bec3db57318c88b"} Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.262719 4707 scope.go:117] "RemoveContainer" containerID="79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.262799 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.265792 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" event={"ID":"946cc19a-22ff-4a9b-862f-f9471d794bb0","Type":"ContainerDied","Data":"e999c685535aa53b32f47ff0d28c9415d63f66cd236bee8876dac193d90cd72a"} Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.265829 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.269946 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-hlvgt" event={"ID":"0e6e6fc4-562a-4ca6-af63-40f7a45400cc","Type":"ContainerDied","Data":"a1e2e6d48e3bf7dd70dd44111f217039e8c31b59aa54c2e8a76fb31d4a3a6ad6"} Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.270013 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-index-hlvgt" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.290385 4707 scope.go:117] "RemoveContainer" containerID="295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.307430 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb"] Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.324235 4707 scope.go:117] "RemoveContainer" containerID="79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73" Dec 04 10:02:57 crc kubenswrapper[4707]: E1204 10:02:57.324790 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73\": container with ID starting with 79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73 not found: ID does not exist" containerID="79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.324819 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73"} err="failed to get container status \"79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73\": rpc error: code = NotFound desc = could not find container \"79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73\": container with ID starting with 79f9072ed2c27a235fe6f99800b9c4524cc38ef458cb22dd31efb1728c78bc73 not found: ID does not exist" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.324840 4707 scope.go:117] "RemoveContainer" containerID="295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012" Dec 04 10:02:57 crc kubenswrapper[4707]: E1204 10:02:57.325133 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012\": container with ID starting with 295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012 not found: ID does not exist" containerID="295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.325241 4707 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012"} err="failed to get container status \"295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012\": rpc error: code = NotFound desc = could not find container \"295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012\": container with ID starting with 295b9de08976158bd86dacb86fed922df8171174e67b75b610e9bfbace7ee012 not found: ID does not exist" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.325306 4707 scope.go:117] "RemoveContainer" containerID="dea4af4e97d8032f50bc97a7c4eaa03a420e6f12c96395fc960168eb6b20a072" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.326815 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7b6f6fdcbf-xmdtb"] Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.330608 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-scripts\") pod \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.330682 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data\") pod \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.330701 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-etc-machine-id\") pod \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.330749 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-827pz\" (UniqueName: \"kubernetes.io/projected/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-kube-api-access-827pz\") pod \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.330837 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data-custom\") pod \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\" (UID: \"dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29\") " Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.331201 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" (UID: "dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.334237 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" (UID: "dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.339771 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-index-hlvgt"] Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.349609 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/manila-operator-index-hlvgt"] Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.357616 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-kube-api-access-827pz" (OuterVolumeSpecName: "kube-api-access-827pz") pod "dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" (UID: "dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29"). InnerVolumeSpecName "kube-api-access-827pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.362865 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-scripts" (OuterVolumeSpecName: "scripts") pod "dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" (UID: "dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.417595 4707 scope.go:117] "RemoveContainer" containerID="7032c18c1b048acf87017a94ece2a3160acaa8d1768de1c0935f19bb7975c949" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.428603 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data" (OuterVolumeSpecName: "config-data") pod "dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" (UID: "dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.432185 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.432307 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.432396 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.432467 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.432532 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-827pz\" (UniqueName: \"kubernetes.io/projected/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29-kube-api-access-827pz\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.597839 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:02:57 crc kubenswrapper[4707]: I1204 10:02:57.603499 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.146423 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.245601 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-scripts\") pod \"4bc01c02-a12a-45f0-9187-f358515e39c8\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.245974 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4bc01c02-a12a-45f0-9187-f358515e39c8-etc-machine-id\") pod \"4bc01c02-a12a-45f0-9187-f358515e39c8\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.246019 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvv2b\" (UniqueName: \"kubernetes.io/projected/4bc01c02-a12a-45f0-9187-f358515e39c8-kube-api-access-lvv2b\") pod \"4bc01c02-a12a-45f0-9187-f358515e39c8\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.246046 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bc01c02-a12a-45f0-9187-f358515e39c8-logs\") pod \"4bc01c02-a12a-45f0-9187-f358515e39c8\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.246080 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data-custom\") pod \"4bc01c02-a12a-45f0-9187-f358515e39c8\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.246228 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data\") pod \"4bc01c02-a12a-45f0-9187-f358515e39c8\" (UID: \"4bc01c02-a12a-45f0-9187-f358515e39c8\") " Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.246557 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bc01c02-a12a-45f0-9187-f358515e39c8-logs" (OuterVolumeSpecName: "logs") pod "4bc01c02-a12a-45f0-9187-f358515e39c8" (UID: "4bc01c02-a12a-45f0-9187-f358515e39c8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.246788 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4bc01c02-a12a-45f0-9187-f358515e39c8-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4bc01c02-a12a-45f0-9187-f358515e39c8" (UID: "4bc01c02-a12a-45f0-9187-f358515e39c8"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.251513 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4bc01c02-a12a-45f0-9187-f358515e39c8" (UID: "4bc01c02-a12a-45f0-9187-f358515e39c8"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.256804 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-scripts" (OuterVolumeSpecName: "scripts") pod "4bc01c02-a12a-45f0-9187-f358515e39c8" (UID: "4bc01c02-a12a-45f0-9187-f358515e39c8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.259405 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bc01c02-a12a-45f0-9187-f358515e39c8-kube-api-access-lvv2b" (OuterVolumeSpecName: "kube-api-access-lvv2b") pod "4bc01c02-a12a-45f0-9187-f358515e39c8" (UID: "4bc01c02-a12a-45f0-9187-f358515e39c8"). InnerVolumeSpecName "kube-api-access-lvv2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.283476 4707 generic.go:334] "Generic (PLEG): container finished" podID="4bc01c02-a12a-45f0-9187-f358515e39c8" containerID="08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a" exitCode=0 Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.283530 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4bc01c02-a12a-45f0-9187-f358515e39c8","Type":"ContainerDied","Data":"08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a"} Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.283553 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4bc01c02-a12a-45f0-9187-f358515e39c8","Type":"ContainerDied","Data":"ad2cfff69fd8e1e9a0f29333bfab5e3441776b489d97824aeb15cb4fc7fc5492"} Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.283569 4707 scope.go:117] "RemoveContainer" containerID="08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.283774 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.291053 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data" (OuterVolumeSpecName: "config-data") pod "4bc01c02-a12a-45f0-9187-f358515e39c8" (UID: "4bc01c02-a12a-45f0-9187-f358515e39c8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.333350 4707 scope.go:117] "RemoveContainer" containerID="55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.348082 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.348125 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.348138 4707 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4bc01c02-a12a-45f0-9187-f358515e39c8-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.348150 4707 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bc01c02-a12a-45f0-9187-f358515e39c8-logs\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.348163 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvv2b\" (UniqueName: \"kubernetes.io/projected/4bc01c02-a12a-45f0-9187-f358515e39c8-kube-api-access-lvv2b\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.348174 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bc01c02-a12a-45f0-9187-f358515e39c8-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.354806 4707 scope.go:117] "RemoveContainer" containerID="08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a" Dec 04 10:02:58 crc kubenswrapper[4707]: E1204 10:02:58.358781 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a\": container with ID starting with 08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a not found: ID does not exist" containerID="08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.358841 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a"} err="failed to get container status \"08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a\": rpc error: code = NotFound desc = could not find container \"08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a\": container with ID starting with 08986256113f0a932b20a42b8861cec0828b930e9fc254751872af5f8d8ade5a not found: ID does not exist" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.358872 4707 scope.go:117] "RemoveContainer" containerID="55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412" Dec 04 10:02:58 crc kubenswrapper[4707]: E1204 10:02:58.362295 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412\": container with ID starting with 
55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412 not found: ID does not exist" containerID="55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.362325 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412"} err="failed to get container status \"55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412\": rpc error: code = NotFound desc = could not find container \"55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412\": container with ID starting with 55d81b2cd6f2fcbab49f93f1ec02a6f8cac85c94a809cd1012f73472b668b412 not found: ID does not exist" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.513691 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.617264 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.621883 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.652466 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cf8gv\" (UniqueName: \"kubernetes.io/projected/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-kube-api-access-cf8gv\") pod \"ba1dbef8-a400-43ad-b24d-45e13ad4eff4\" (UID: \"ba1dbef8-a400-43ad-b24d-45e13ad4eff4\") " Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.653115 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-operator-scripts\") pod \"ba1dbef8-a400-43ad-b24d-45e13ad4eff4\" (UID: \"ba1dbef8-a400-43ad-b24d-45e13ad4eff4\") " Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.653713 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ba1dbef8-a400-43ad-b24d-45e13ad4eff4" (UID: "ba1dbef8-a400-43ad-b24d-45e13ad4eff4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.655571 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-kube-api-access-cf8gv" (OuterVolumeSpecName: "kube-api-access-cf8gv") pod "ba1dbef8-a400-43ad-b24d-45e13ad4eff4" (UID: "ba1dbef8-a400-43ad-b24d-45e13ad4eff4"). InnerVolumeSpecName "kube-api-access-cf8gv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.754542 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.754588 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cf8gv\" (UniqueName: \"kubernetes.io/projected/ba1dbef8-a400-43ad-b24d-45e13ad4eff4-kube-api-access-cf8gv\") on node \"crc\" DevicePath \"\"" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.853225 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e6e6fc4-562a-4ca6-af63-40f7a45400cc" path="/var/lib/kubelet/pods/0e6e6fc4-562a-4ca6-af63-40f7a45400cc/volumes" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.854057 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bc01c02-a12a-45f0-9187-f358515e39c8" path="/var/lib/kubelet/pods/4bc01c02-a12a-45f0-9187-f358515e39c8/volumes" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.854715 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="946cc19a-22ff-4a9b-862f-f9471d794bb0" path="/var/lib/kubelet/pods/946cc19a-22ff-4a9b-862f-f9471d794bb0/volumes" Dec 04 10:02:58 crc kubenswrapper[4707]: I1204 10:02:58.855857 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" path="/var/lib/kubelet/pods/dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29/volumes" Dec 04 10:02:59 crc kubenswrapper[4707]: I1204 10:02:59.300190 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" event={"ID":"ba1dbef8-a400-43ad-b24d-45e13ad4eff4","Type":"ContainerDied","Data":"a7e7da1658ced9172865cc7aacfc00e164d1d7244d02d955f28f2684cb3af3d0"} Dec 04 10:02:59 crc kubenswrapper[4707]: I1204 10:02:59.300244 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7e7da1658ced9172865cc7aacfc00e164d1d7244d02d955f28f2684cb3af3d0" Dec 04 10:02:59 crc kubenswrapper[4707]: I1204 10:02:59.300251 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila5c72-account-delete-slvg4" Dec 04 10:02:59 crc kubenswrapper[4707]: I1204 10:02:59.661601 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-create-d2zn9"] Dec 04 10:02:59 crc kubenswrapper[4707]: I1204 10:02:59.673866 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-create-d2zn9"] Dec 04 10:02:59 crc kubenswrapper[4707]: I1204 10:02:59.679233 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila5c72-account-delete-slvg4"] Dec 04 10:02:59 crc kubenswrapper[4707]: I1204 10:02:59.683790 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-5c72-account-create-update-z9r5d"] Dec 04 10:02:59 crc kubenswrapper[4707]: I1204 10:02:59.688495 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila5c72-account-delete-slvg4"] Dec 04 10:02:59 crc kubenswrapper[4707]: I1204 10:02:59.693715 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-5c72-account-create-update-z9r5d"] Dec 04 10:03:00 crc kubenswrapper[4707]: I1204 10:03:00.817377 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 10:03:00 crc kubenswrapper[4707]: I1204 10:03:00.817735 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 10:03:00 crc kubenswrapper[4707]: I1204 10:03:00.817792 4707 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 10:03:00 crc kubenswrapper[4707]: I1204 10:03:00.818358 4707 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a40438c3e1376df9722e48d67ff0c6c89cb3a6cefb6a18fc0adad90335e07b60"} pod="openshift-machine-config-operator/machine-config-daemon-c244z" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 10:03:00 crc kubenswrapper[4707]: I1204 10:03:00.818407 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" containerID="cri-o://a40438c3e1376df9722e48d67ff0c6c89cb3a6cefb6a18fc0adad90335e07b60" gracePeriod=600 Dec 04 10:03:00 crc kubenswrapper[4707]: I1204 10:03:00.852553 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d8fa69c-f195-4b3b-9f65-dd81969b024b" path="/var/lib/kubelet/pods/0d8fa69c-f195-4b3b-9f65-dd81969b024b/volumes" Dec 04 10:03:00 crc kubenswrapper[4707]: I1204 10:03:00.853241 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29bb829a-15e0-461b-96ce-8f42ec71a111" path="/var/lib/kubelet/pods/29bb829a-15e0-461b-96ce-8f42ec71a111/volumes" Dec 04 10:03:00 crc kubenswrapper[4707]: I1204 10:03:00.853875 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="ba1dbef8-a400-43ad-b24d-45e13ad4eff4" path="/var/lib/kubelet/pods/ba1dbef8-a400-43ad-b24d-45e13ad4eff4/volumes" Dec 04 10:03:01 crc kubenswrapper[4707]: I1204 10:03:01.317618 4707 generic.go:334] "Generic (PLEG): container finished" podID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerID="a40438c3e1376df9722e48d67ff0c6c89cb3a6cefb6a18fc0adad90335e07b60" exitCode=0 Dec 04 10:03:01 crc kubenswrapper[4707]: I1204 10:03:01.317765 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerDied","Data":"a40438c3e1376df9722e48d67ff0c6c89cb3a6cefb6a18fc0adad90335e07b60"} Dec 04 10:03:01 crc kubenswrapper[4707]: I1204 10:03:01.317986 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8"} Dec 04 10:03:01 crc kubenswrapper[4707]: I1204 10:03:01.318006 4707 scope.go:117] "RemoveContainer" containerID="e65929a2443d875614d7254731d235fbb949ab647cb08abccd54722f9ef3c29b" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.828777 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-db-sync-b4xlv"] Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.829097 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-bootstrap-v9ng7"] Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.831116 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone-db-sync-b4xlv"] Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.837269 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone-bootstrap-v9ng7"] Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.842990 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-6b5497869-bq6qk"] Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.843243 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" podUID="37fbf63e-d62a-4ea7-b15c-c34c72aab829" containerName="keystone-api" containerID="cri-o://8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb" gracePeriod=30 Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.857775 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a2f408d-5beb-4e5c-ac7f-547ea5383f7d" path="/var/lib/kubelet/pods/3a2f408d-5beb-4e5c-ac7f-547ea5383f7d/volumes" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.858418 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d84e4321-66f6-4c4b-842b-ada408c4c446" path="/var/lib/kubelet/pods/d84e4321-66f6-4c4b-842b-ada408c4c446/volumes" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.858936 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-cron-29414041-7mcts"] Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.863760 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone-cron-29414041-7mcts"] Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.904905 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone3277-account-delete-bgx8f"] Dec 04 10:03:02 crc kubenswrapper[4707]: E1204 10:03:02.905151 4707 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="d70cf714-12f6-448e-94f4-6b276f8de691" containerName="manila-share" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905166 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="d70cf714-12f6-448e-94f4-6b276f8de691" containerName="manila-share" Dec 04 10:03:02 crc kubenswrapper[4707]: E1204 10:03:02.905180 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d70cf714-12f6-448e-94f4-6b276f8de691" containerName="probe" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905186 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="d70cf714-12f6-448e-94f4-6b276f8de691" containerName="probe" Dec 04 10:03:02 crc kubenswrapper[4707]: E1204 10:03:02.905195 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" containerName="manila-scheduler" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905201 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" containerName="manila-scheduler" Dec 04 10:03:02 crc kubenswrapper[4707]: E1204 10:03:02.905208 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" containerName="probe" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905214 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" containerName="probe" Dec 04 10:03:02 crc kubenswrapper[4707]: E1204 10:03:02.905224 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="946cc19a-22ff-4a9b-862f-f9471d794bb0" containerName="manager" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905230 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="946cc19a-22ff-4a9b-862f-f9471d794bb0" containerName="manager" Dec 04 10:03:02 crc kubenswrapper[4707]: E1204 10:03:02.905238 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba1dbef8-a400-43ad-b24d-45e13ad4eff4" containerName="mariadb-account-delete" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905243 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba1dbef8-a400-43ad-b24d-45e13ad4eff4" containerName="mariadb-account-delete" Dec 04 10:03:02 crc kubenswrapper[4707]: E1204 10:03:02.905252 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bc01c02-a12a-45f0-9187-f358515e39c8" containerName="manila-api-log" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905258 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bc01c02-a12a-45f0-9187-f358515e39c8" containerName="manila-api-log" Dec 04 10:03:02 crc kubenswrapper[4707]: E1204 10:03:02.905266 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6e6fc4-562a-4ca6-af63-40f7a45400cc" containerName="registry-server" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905272 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6e6fc4-562a-4ca6-af63-40f7a45400cc" containerName="registry-server" Dec 04 10:03:02 crc kubenswrapper[4707]: E1204 10:03:02.905283 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bc01c02-a12a-45f0-9187-f358515e39c8" containerName="manila-api" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905289 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bc01c02-a12a-45f0-9187-f358515e39c8" containerName="manila-api" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905411 4707 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" containerName="probe" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905421 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="946cc19a-22ff-4a9b-862f-f9471d794bb0" containerName="manager" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905429 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bc01c02-a12a-45f0-9187-f358515e39c8" containerName="manila-api" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905439 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="d70cf714-12f6-448e-94f4-6b276f8de691" containerName="probe" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905446 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="d70cf714-12f6-448e-94f4-6b276f8de691" containerName="manila-share" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905468 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0ff7c1-a8d7-449f-8fa6-b5a0be813a29" containerName="manila-scheduler" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905475 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e6e6fc4-562a-4ca6-af63-40f7a45400cc" containerName="registry-server" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905482 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba1dbef8-a400-43ad-b24d-45e13ad4eff4" containerName="mariadb-account-delete" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905491 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bc01c02-a12a-45f0-9187-f358515e39c8" containerName="manila-api-log" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.905873 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" Dec 04 10:03:02 crc kubenswrapper[4707]: I1204 10:03:02.921997 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone3277-account-delete-bgx8f"] Dec 04 10:03:03 crc kubenswrapper[4707]: I1204 10:03:03.032460 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5bwj\" (UniqueName: \"kubernetes.io/projected/a5838998-f07f-429c-916c-6ad39edd46cd-kube-api-access-c5bwj\") pod \"keystone3277-account-delete-bgx8f\" (UID: \"a5838998-f07f-429c-916c-6ad39edd46cd\") " pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" Dec 04 10:03:03 crc kubenswrapper[4707]: I1204 10:03:03.032521 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts\") pod \"keystone3277-account-delete-bgx8f\" (UID: \"a5838998-f07f-429c-916c-6ad39edd46cd\") " pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" Dec 04 10:03:03 crc kubenswrapper[4707]: I1204 10:03:03.133797 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5bwj\" (UniqueName: \"kubernetes.io/projected/a5838998-f07f-429c-916c-6ad39edd46cd-kube-api-access-c5bwj\") pod \"keystone3277-account-delete-bgx8f\" (UID: \"a5838998-f07f-429c-916c-6ad39edd46cd\") " pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" Dec 04 10:03:03 crc kubenswrapper[4707]: I1204 10:03:03.134055 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts\") pod \"keystone3277-account-delete-bgx8f\" (UID: \"a5838998-f07f-429c-916c-6ad39edd46cd\") " pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" Dec 04 10:03:03 crc kubenswrapper[4707]: I1204 10:03:03.135033 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts\") pod \"keystone3277-account-delete-bgx8f\" (UID: \"a5838998-f07f-429c-916c-6ad39edd46cd\") " pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" Dec 04 10:03:03 crc kubenswrapper[4707]: I1204 10:03:03.153903 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5bwj\" (UniqueName: \"kubernetes.io/projected/a5838998-f07f-429c-916c-6ad39edd46cd-kube-api-access-c5bwj\") pod \"keystone3277-account-delete-bgx8f\" (UID: \"a5838998-f07f-429c-916c-6ad39edd46cd\") " pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" Dec 04 10:03:03 crc kubenswrapper[4707]: I1204 10:03:03.223987 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" Dec 04 10:03:03 crc kubenswrapper[4707]: I1204 10:03:03.637137 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone3277-account-delete-bgx8f"] Dec 04 10:03:03 crc kubenswrapper[4707]: W1204 10:03:03.646582 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5838998_f07f_429c_916c_6ad39edd46cd.slice/crio-c30dfd932e004baf579978ac88ccbe3ceefad08d25e7038c6d725991bcd69821 WatchSource:0}: Error finding container c30dfd932e004baf579978ac88ccbe3ceefad08d25e7038c6d725991bcd69821: Status 404 returned error can't find the container with id c30dfd932e004baf579978ac88ccbe3ceefad08d25e7038c6d725991bcd69821 Dec 04 10:03:03 crc kubenswrapper[4707]: I1204 10:03:03.734764 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-1"] Dec 04 10:03:03 crc kubenswrapper[4707]: I1204 10:03:03.739880 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-0"] Dec 04 10:03:03 crc kubenswrapper[4707]: I1204 10:03:03.744956 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-2"] Dec 04 10:03:04 crc kubenswrapper[4707]: I1204 10:03:04.028761 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/openstack-galera-2" podUID="3b373699-1303-4b1a-914d-7764376f5b38" containerName="galera" containerID="cri-o://1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789" gracePeriod=30 Dec 04 10:03:04 crc kubenswrapper[4707]: I1204 10:03:04.345560 4707 generic.go:334] "Generic (PLEG): container finished" podID="a5838998-f07f-429c-916c-6ad39edd46cd" containerID="3b1da7efa76ab15ffbc3392c39b911058cfc959c4e75427bfb13ecd6485d0f82" exitCode=1 Dec 04 10:03:04 crc kubenswrapper[4707]: I1204 10:03:04.345611 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" event={"ID":"a5838998-f07f-429c-916c-6ad39edd46cd","Type":"ContainerDied","Data":"3b1da7efa76ab15ffbc3392c39b911058cfc959c4e75427bfb13ecd6485d0f82"} Dec 04 10:03:04 crc kubenswrapper[4707]: I1204 10:03:04.345656 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" event={"ID":"a5838998-f07f-429c-916c-6ad39edd46cd","Type":"ContainerStarted","Data":"c30dfd932e004baf579978ac88ccbe3ceefad08d25e7038c6d725991bcd69821"} Dec 04 10:03:04 crc kubenswrapper[4707]: I1204 10:03:04.346093 4707 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" secret="" err="secret \"galera-openstack-dockercfg-t8dpj\" not found" Dec 04 10:03:04 crc kubenswrapper[4707]: I1204 10:03:04.346135 4707 scope.go:117] "RemoveContainer" containerID="3b1da7efa76ab15ffbc3392c39b911058cfc959c4e75427bfb13ecd6485d0f82" Dec 04 10:03:04 crc kubenswrapper[4707]: E1204 10:03:04.453515 4707 configmap.go:193] Couldn't get configMap manila-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 04 10:03:04 crc kubenswrapper[4707]: E1204 10:03:04.453622 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts podName:a5838998-f07f-429c-916c-6ad39edd46cd nodeName:}" failed. 
No retries permitted until 2025-12-04 10:03:04.953597189 +0000 UTC m=+1484.389419696 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts") pod "keystone3277-account-delete-bgx8f" (UID: "a5838998-f07f-429c-916c-6ad39edd46cd") : configmap "openstack-scripts" not found Dec 04 10:03:04 crc kubenswrapper[4707]: I1204 10:03:04.481306 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/memcached-0"] Dec 04 10:03:04 crc kubenswrapper[4707]: I1204 10:03:04.482544 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/memcached-0" podUID="b38182e9-ec12-42a7-b506-83ba39b9042c" containerName="memcached" containerID="cri-o://b5467eccc66bdc7b1b97924d1b861fa1970784955e8b414c0a057b3e1345c1ea" gracePeriod=30 Dec 04 10:03:04 crc kubenswrapper[4707]: I1204 10:03:04.854048 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="452395d2-196f-415a-9d87-47e73b329310" path="/var/lib/kubelet/pods/452395d2-196f-415a-9d87-47e73b329310/volumes" Dec 04 10:03:04 crc kubenswrapper[4707]: I1204 10:03:04.892670 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-2" Dec 04 10:03:04 crc kubenswrapper[4707]: E1204 10:03:04.963485 4707 configmap.go:193] Couldn't get configMap manila-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 04 10:03:04 crc kubenswrapper[4707]: E1204 10:03:04.963548 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts podName:a5838998-f07f-429c-916c-6ad39edd46cd nodeName:}" failed. No retries permitted until 2025-12-04 10:03:05.963532312 +0000 UTC m=+1485.399354819 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts") pod "keystone3277-account-delete-bgx8f" (UID: "a5838998-f07f-429c-916c-6ad39edd46cd") : configmap "openstack-scripts" not found Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.015161 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.063300 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3b373699-1303-4b1a-914d-7764376f5b38-config-data-generated\") pod \"3b373699-1303-4b1a-914d-7764376f5b38\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.063400 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpdbf\" (UniqueName: \"kubernetes.io/projected/3b373699-1303-4b1a-914d-7764376f5b38-kube-api-access-jpdbf\") pod \"3b373699-1303-4b1a-914d-7764376f5b38\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.063432 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"3b373699-1303-4b1a-914d-7764376f5b38\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.063452 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-operator-scripts\") pod \"3b373699-1303-4b1a-914d-7764376f5b38\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.063480 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-config-data-default\") pod \"3b373699-1303-4b1a-914d-7764376f5b38\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.063499 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-kolla-config\") pod \"3b373699-1303-4b1a-914d-7764376f5b38\" (UID: \"3b373699-1303-4b1a-914d-7764376f5b38\") " Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.063683 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b373699-1303-4b1a-914d-7764376f5b38-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "3b373699-1303-4b1a-914d-7764376f5b38" (UID: "3b373699-1303-4b1a-914d-7764376f5b38"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.063811 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3b373699-1303-4b1a-914d-7764376f5b38-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.063950 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "3b373699-1303-4b1a-914d-7764376f5b38" (UID: "3b373699-1303-4b1a-914d-7764376f5b38"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.064218 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "3b373699-1303-4b1a-914d-7764376f5b38" (UID: "3b373699-1303-4b1a-914d-7764376f5b38"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.064392 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3b373699-1303-4b1a-914d-7764376f5b38" (UID: "3b373699-1303-4b1a-914d-7764376f5b38"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.074264 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "3b373699-1303-4b1a-914d-7764376f5b38" (UID: "3b373699-1303-4b1a-914d-7764376f5b38"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.075294 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b373699-1303-4b1a-914d-7764376f5b38-kube-api-access-jpdbf" (OuterVolumeSpecName: "kube-api-access-jpdbf") pod "3b373699-1303-4b1a-914d-7764376f5b38" (UID: "3b373699-1303-4b1a-914d-7764376f5b38"). InnerVolumeSpecName "kube-api-access-jpdbf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.165750 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpdbf\" (UniqueName: \"kubernetes.io/projected/3b373699-1303-4b1a-914d-7764376f5b38-kube-api-access-jpdbf\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.166098 4707 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.166112 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.166121 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.166129 4707 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3b373699-1303-4b1a-914d-7764376f5b38-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.179044 4707 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.266994 4707 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.370632 4707 generic.go:334] "Generic (PLEG): container finished" podID="3b373699-1303-4b1a-914d-7764376f5b38" containerID="1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789" exitCode=0 Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.370697 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"3b373699-1303-4b1a-914d-7764376f5b38","Type":"ContainerDied","Data":"1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789"} Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.370723 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"3b373699-1303-4b1a-914d-7764376f5b38","Type":"ContainerDied","Data":"81aea55eb079371032df154c113d28917c33fb500c4298c482209697d5a04eee"} Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.370740 4707 scope.go:117] "RemoveContainer" containerID="1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.370864 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/openstack-galera-2" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.386956 4707 generic.go:334] "Generic (PLEG): container finished" podID="a5838998-f07f-429c-916c-6ad39edd46cd" containerID="96acdea9d1adf226492841fd35c730bd6c7be4592a253706df5ab8955a2d3c72" exitCode=1 Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.387011 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" event={"ID":"a5838998-f07f-429c-916c-6ad39edd46cd","Type":"ContainerDied","Data":"96acdea9d1adf226492841fd35c730bd6c7be4592a253706df5ab8955a2d3c72"} Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.387568 4707 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" secret="" err="secret \"galera-openstack-dockercfg-t8dpj\" not found" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.387616 4707 scope.go:117] "RemoveContainer" containerID="96acdea9d1adf226492841fd35c730bd6c7be4592a253706df5ab8955a2d3c72" Dec 04 10:03:05 crc kubenswrapper[4707]: E1204 10:03:05.387927 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=keystone3277-account-delete-bgx8f_manila-kuttl-tests(a5838998-f07f-429c-916c-6ad39edd46cd)\"" pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" podUID="a5838998-f07f-429c-916c-6ad39edd46cd" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.394285 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.440856 4707 scope.go:117] "RemoveContainer" containerID="04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.442510 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-2"] Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.446960 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/rabbitmq-server-0" podUID="51dd2aae-c620-4d95-b261-1cb6065096e3" containerName="rabbitmq" containerID="cri-o://10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02" gracePeriod=604800 Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.452907 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/openstack-galera-2"] Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.472454 4707 scope.go:117] "RemoveContainer" containerID="1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789" Dec 04 10:03:05 crc kubenswrapper[4707]: E1204 10:03:05.476017 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789\": container with ID starting with 1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789 not found: ID does not exist" containerID="1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.476070 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789"} err="failed to get container status 
\"1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789\": rpc error: code = NotFound desc = could not find container \"1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789\": container with ID starting with 1a2d2991641ebb8ef7717910d319fd599a76fc4337fc5f614f2d5f4db8c04789 not found: ID does not exist" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.476097 4707 scope.go:117] "RemoveContainer" containerID="04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32" Dec 04 10:03:05 crc kubenswrapper[4707]: E1204 10:03:05.476460 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32\": container with ID starting with 04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32 not found: ID does not exist" containerID="04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.476507 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32"} err="failed to get container status \"04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32\": rpc error: code = NotFound desc = could not find container \"04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32\": container with ID starting with 04810cb0bfd1d640bbb6e137d22e1b808e18a55232a1d09578f80a883551de32 not found: ID does not exist" Dec 04 10:03:05 crc kubenswrapper[4707]: I1204 10:03:05.476539 4707 scope.go:117] "RemoveContainer" containerID="3b1da7efa76ab15ffbc3392c39b911058cfc959c4e75427bfb13ecd6485d0f82" Dec 04 10:03:05 crc kubenswrapper[4707]: E1204 10:03:05.977199 4707 configmap.go:193] Couldn't get configMap manila-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 04 10:03:05 crc kubenswrapper[4707]: E1204 10:03:05.977619 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts podName:a5838998-f07f-429c-916c-6ad39edd46cd nodeName:}" failed. No retries permitted until 2025-12-04 10:03:07.977590394 +0000 UTC m=+1487.413412901 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts") pod "keystone3277-account-delete-bgx8f" (UID: "a5838998-f07f-429c-916c-6ad39edd46cd") : configmap "openstack-scripts" not found Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.014289 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/ceph"] Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.014599 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/ceph" podUID="ae5166d7-2ccb-4e29-8066-7b355eb947cc" containerName="ceph" containerID="cri-o://eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa" gracePeriod=30 Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.060479 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/openstack-galera-1" podUID="2538c764-a696-4ce4-95fa-58c782e0b71f" containerName="galera" containerID="cri-o://4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96" gracePeriod=28 Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.361354 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.395951 4707 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" secret="" err="secret \"galera-openstack-dockercfg-t8dpj\" not found" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.396000 4707 scope.go:117] "RemoveContainer" containerID="96acdea9d1adf226492841fd35c730bd6c7be4592a253706df5ab8955a2d3c72" Dec 04 10:03:06 crc kubenswrapper[4707]: E1204 10:03:06.396357 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=keystone3277-account-delete-bgx8f_manila-kuttl-tests(a5838998-f07f-429c-916c-6ad39edd46cd)\"" pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" podUID="a5838998-f07f-429c-916c-6ad39edd46cd" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.397529 4707 generic.go:334] "Generic (PLEG): container finished" podID="b38182e9-ec12-42a7-b506-83ba39b9042c" containerID="b5467eccc66bdc7b1b97924d1b861fa1970784955e8b414c0a057b3e1345c1ea" exitCode=0 Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.397587 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/memcached-0" event={"ID":"b38182e9-ec12-42a7-b506-83ba39b9042c","Type":"ContainerDied","Data":"b5467eccc66bdc7b1b97924d1b861fa1970784955e8b414c0a057b3e1345c1ea"} Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.399868 4707 generic.go:334] "Generic (PLEG): container finished" podID="37fbf63e-d62a-4ea7-b15c-c34c72aab829" containerID="8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb" exitCode=0 Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.399902 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" event={"ID":"37fbf63e-d62a-4ea7-b15c-c34c72aab829","Type":"ContainerDied","Data":"8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb"} Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.399936 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" event={"ID":"37fbf63e-d62a-4ea7-b15c-c34c72aab829","Type":"ContainerDied","Data":"a4f3183a4f65b026ec7e1d9b6e008b0d3f32c03d5c15f3c236411a860414c031"} Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.399957 4707 scope.go:117] "RemoveContainer" containerID="8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.400151 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-6b5497869-bq6qk" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.431805 4707 scope.go:117] "RemoveContainer" containerID="8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb" Dec 04 10:03:06 crc kubenswrapper[4707]: E1204 10:03:06.432322 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb\": container with ID starting with 8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb not found: ID does not exist" containerID="8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.432410 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb"} err="failed to get container status \"8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb\": rpc error: code = NotFound desc = could not find container \"8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb\": container with ID starting with 8ea8246d529a17dc453e3611571e24306eb9996f15394da1492c37262bdd47fb not found: ID does not exist" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.482228 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-scripts\") pod \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.482293 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-config-data\") pod \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.482446 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-credential-keys\") pod \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.482474 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9qg4\" (UniqueName: \"kubernetes.io/projected/37fbf63e-d62a-4ea7-b15c-c34c72aab829-kube-api-access-s9qg4\") pod \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.482489 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-fernet-keys\") pod \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\" (UID: \"37fbf63e-d62a-4ea7-b15c-c34c72aab829\") " Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.489741 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "37fbf63e-d62a-4ea7-b15c-c34c72aab829" (UID: "37fbf63e-d62a-4ea7-b15c-c34c72aab829"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.489761 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "37fbf63e-d62a-4ea7-b15c-c34c72aab829" (UID: "37fbf63e-d62a-4ea7-b15c-c34c72aab829"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.489835 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37fbf63e-d62a-4ea7-b15c-c34c72aab829-kube-api-access-s9qg4" (OuterVolumeSpecName: "kube-api-access-s9qg4") pod "37fbf63e-d62a-4ea7-b15c-c34c72aab829" (UID: "37fbf63e-d62a-4ea7-b15c-c34c72aab829"). InnerVolumeSpecName "kube-api-access-s9qg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.490122 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-scripts" (OuterVolumeSpecName: "scripts") pod "37fbf63e-d62a-4ea7-b15c-c34c72aab829" (UID: "37fbf63e-d62a-4ea7-b15c-c34c72aab829"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.502646 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-config-data" (OuterVolumeSpecName: "config-data") pod "37fbf63e-d62a-4ea7-b15c-c34c72aab829" (UID: "37fbf63e-d62a-4ea7-b15c-c34c72aab829"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.550422 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/memcached-0" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.584321 4707 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.584379 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9qg4\" (UniqueName: \"kubernetes.io/projected/37fbf63e-d62a-4ea7-b15c-c34c72aab829-kube-api-access-s9qg4\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.584391 4707 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.584399 4707 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.584407 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37fbf63e-d62a-4ea7-b15c-c34c72aab829-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.667024 4707 prober.go:107] "Probe failed" probeType="Readiness" pod="manila-kuttl-tests/rabbitmq-server-0" podUID="51dd2aae-c620-4d95-b261-1cb6065096e3" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.71:5672: connect: connection refused" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.685080 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmx9d\" (UniqueName: \"kubernetes.io/projected/b38182e9-ec12-42a7-b506-83ba39b9042c-kube-api-access-cmx9d\") pod \"b38182e9-ec12-42a7-b506-83ba39b9042c\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.685159 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-kolla-config\") pod \"b38182e9-ec12-42a7-b506-83ba39b9042c\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.685249 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-config-data\") pod \"b38182e9-ec12-42a7-b506-83ba39b9042c\" (UID: \"b38182e9-ec12-42a7-b506-83ba39b9042c\") " Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.685924 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-config-data" (OuterVolumeSpecName: "config-data") pod "b38182e9-ec12-42a7-b506-83ba39b9042c" (UID: "b38182e9-ec12-42a7-b506-83ba39b9042c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.686398 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "b38182e9-ec12-42a7-b506-83ba39b9042c" (UID: "b38182e9-ec12-42a7-b506-83ba39b9042c"). 
InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.687774 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b38182e9-ec12-42a7-b506-83ba39b9042c-kube-api-access-cmx9d" (OuterVolumeSpecName: "kube-api-access-cmx9d") pod "b38182e9-ec12-42a7-b506-83ba39b9042c" (UID: "b38182e9-ec12-42a7-b506-83ba39b9042c"). InnerVolumeSpecName "kube-api-access-cmx9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.729682 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-6b5497869-bq6qk"] Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.735831 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone-6b5497869-bq6qk"] Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.787517 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.787547 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmx9d\" (UniqueName: \"kubernetes.io/projected/b38182e9-ec12-42a7-b506-83ba39b9042c-kube-api-access-cmx9d\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.787556 4707 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b38182e9-ec12-42a7-b506-83ba39b9042c-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.857249 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37fbf63e-d62a-4ea7-b15c-c34c72aab829" path="/var/lib/kubelet/pods/37fbf63e-d62a-4ea7-b15c-c34c72aab829/volumes" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.857993 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b373699-1303-4b1a-914d-7764376f5b38" path="/var/lib/kubelet/pods/3b373699-1303-4b1a-914d-7764376f5b38/volumes" Dec 04 10:03:06 crc kubenswrapper[4707]: I1204 10:03:06.937982 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.092931 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/51dd2aae-c620-4d95-b261-1cb6065096e3-plugins-conf\") pod \"51dd2aae-c620-4d95-b261-1cb6065096e3\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.093000 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x4l6\" (UniqueName: \"kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-kube-api-access-2x4l6\") pod \"51dd2aae-c620-4d95-b261-1cb6065096e3\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.093030 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/51dd2aae-c620-4d95-b261-1cb6065096e3-erlang-cookie-secret\") pod \"51dd2aae-c620-4d95-b261-1cb6065096e3\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.093104 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/51dd2aae-c620-4d95-b261-1cb6065096e3-pod-info\") pod \"51dd2aae-c620-4d95-b261-1cb6065096e3\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.093134 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-plugins\") pod \"51dd2aae-c620-4d95-b261-1cb6065096e3\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.093194 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-erlang-cookie\") pod \"51dd2aae-c620-4d95-b261-1cb6065096e3\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.093356 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\") pod \"51dd2aae-c620-4d95-b261-1cb6065096e3\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.093411 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-confd\") pod \"51dd2aae-c620-4d95-b261-1cb6065096e3\" (UID: \"51dd2aae-c620-4d95-b261-1cb6065096e3\") " Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.094074 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "51dd2aae-c620-4d95-b261-1cb6065096e3" (UID: "51dd2aae-c620-4d95-b261-1cb6065096e3"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.094384 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "51dd2aae-c620-4d95-b261-1cb6065096e3" (UID: "51dd2aae-c620-4d95-b261-1cb6065096e3"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.094778 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51dd2aae-c620-4d95-b261-1cb6065096e3-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "51dd2aae-c620-4d95-b261-1cb6065096e3" (UID: "51dd2aae-c620-4d95-b261-1cb6065096e3"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.098649 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-kube-api-access-2x4l6" (OuterVolumeSpecName: "kube-api-access-2x4l6") pod "51dd2aae-c620-4d95-b261-1cb6065096e3" (UID: "51dd2aae-c620-4d95-b261-1cb6065096e3"). InnerVolumeSpecName "kube-api-access-2x4l6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.098647 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51dd2aae-c620-4d95-b261-1cb6065096e3-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "51dd2aae-c620-4d95-b261-1cb6065096e3" (UID: "51dd2aae-c620-4d95-b261-1cb6065096e3"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.098669 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/51dd2aae-c620-4d95-b261-1cb6065096e3-pod-info" (OuterVolumeSpecName: "pod-info") pod "51dd2aae-c620-4d95-b261-1cb6065096e3" (UID: "51dd2aae-c620-4d95-b261-1cb6065096e3"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.105438 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0bb6276d-38a3-4fbf-9004-48537ed76590" (OuterVolumeSpecName: "persistence") pod "51dd2aae-c620-4d95-b261-1cb6065096e3" (UID: "51dd2aae-c620-4d95-b261-1cb6065096e3"). InnerVolumeSpecName "pvc-0bb6276d-38a3-4fbf-9004-48537ed76590". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.157983 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "51dd2aae-c620-4d95-b261-1cb6065096e3" (UID: "51dd2aae-c620-4d95-b261-1cb6065096e3"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.195031 4707 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.195073 4707 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/51dd2aae-c620-4d95-b261-1cb6065096e3-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.195084 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x4l6\" (UniqueName: \"kubernetes.io/projected/51dd2aae-c620-4d95-b261-1cb6065096e3-kube-api-access-2x4l6\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.195094 4707 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/51dd2aae-c620-4d95-b261-1cb6065096e3-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.195103 4707 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/51dd2aae-c620-4d95-b261-1cb6065096e3-pod-info\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.195110 4707 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.195118 4707 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/51dd2aae-c620-4d95-b261-1cb6065096e3-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.195151 4707 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\") on node \"crc\" " Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.209176 4707 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.209319 4707 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-0bb6276d-38a3-4fbf-9004-48537ed76590" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0bb6276d-38a3-4fbf-9004-48537ed76590") on node "crc" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.296915 4707 reconciler_common.go:293] "Volume detached for volume \"pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0bb6276d-38a3-4fbf-9004-48537ed76590\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.406703 4707 generic.go:334] "Generic (PLEG): container finished" podID="51dd2aae-c620-4d95-b261-1cb6065096e3" containerID="10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02" exitCode=0 Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.406782 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/rabbitmq-server-0" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.406802 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"51dd2aae-c620-4d95-b261-1cb6065096e3","Type":"ContainerDied","Data":"10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02"} Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.406837 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"51dd2aae-c620-4d95-b261-1cb6065096e3","Type":"ContainerDied","Data":"65d6e14e5ee811d24dde0c4f8e9aa27cfa046301d6e085a0c5591e0b1983ba39"} Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.406858 4707 scope.go:117] "RemoveContainer" containerID="10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.411290 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/memcached-0" event={"ID":"b38182e9-ec12-42a7-b506-83ba39b9042c","Type":"ContainerDied","Data":"1d945753a1305cfc30656c1ba54b1ccd619aa589826d3b6caada3575dc4f0b99"} Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.411373 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/memcached-0" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.432430 4707 scope.go:117] "RemoveContainer" containerID="3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.441514 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/memcached-0"] Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.452540 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/memcached-0"] Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.460069 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.465587 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.467706 4707 scope.go:117] "RemoveContainer" containerID="10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02" Dec 04 10:03:07 crc kubenswrapper[4707]: E1204 10:03:07.468221 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02\": container with ID starting with 10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02 not found: ID does not exist" containerID="10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.468280 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02"} err="failed to get container status \"10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02\": rpc error: code = NotFound desc = could not find container \"10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02\": container with ID starting with 10f9c023d7770a7c66c2a81642e90a309ba6299d8ca08937883515507bbd2e02 not found: ID does not exist" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.468308 4707 scope.go:117] "RemoveContainer" 
containerID="3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829" Dec 04 10:03:07 crc kubenswrapper[4707]: E1204 10:03:07.469500 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829\": container with ID starting with 3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829 not found: ID does not exist" containerID="3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.469617 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829"} err="failed to get container status \"3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829\": rpc error: code = NotFound desc = could not find container \"3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829\": container with ID starting with 3af3f3022d4ba5eb3464e5d2a68b6a0dcd4ec5dd3615e806bca9d9f89efbe829 not found: ID does not exist" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.469725 4707 scope.go:117] "RemoveContainer" containerID="b5467eccc66bdc7b1b97924d1b861fa1970784955e8b414c0a057b3e1345c1ea" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.931292 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-db-create-pbqch"] Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.936378 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-1" Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.944407 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone-db-create-pbqch"] Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.956114 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-3277-account-create-update-qv7m7"] Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.963979 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone-3277-account-create-update-qv7m7"] Dec 04 10:03:07 crc kubenswrapper[4707]: I1204 10:03:07.969265 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone3277-account-delete-bgx8f"] Dec 04 10:03:08 crc kubenswrapper[4707]: E1204 10:03:08.007280 4707 configmap.go:193] Couldn't get configMap manila-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 04 10:03:08 crc kubenswrapper[4707]: E1204 10:03:08.008004 4707 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts podName:a5838998-f07f-429c-916c-6ad39edd46cd nodeName:}" failed. No retries permitted until 2025-12-04 10:03:12.007985289 +0000 UTC m=+1491.443807796 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts") pod "keystone3277-account-delete-bgx8f" (UID: "a5838998-f07f-429c-916c-6ad39edd46cd") : configmap "openstack-scripts" not found Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.075515 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/openstack-galera-0" podUID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" containerName="galera" containerID="cri-o://940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b" gracePeriod=26 Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.108630 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-generated\") pod \"2538c764-a696-4ce4-95fa-58c782e0b71f\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.108704 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwbhw\" (UniqueName: \"kubernetes.io/projected/2538c764-a696-4ce4-95fa-58c782e0b71f-kube-api-access-qwbhw\") pod \"2538c764-a696-4ce4-95fa-58c782e0b71f\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.108732 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-kolla-config\") pod \"2538c764-a696-4ce4-95fa-58c782e0b71f\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.108832 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-default\") pod \"2538c764-a696-4ce4-95fa-58c782e0b71f\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.108894 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"2538c764-a696-4ce4-95fa-58c782e0b71f\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.108956 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-operator-scripts\") pod \"2538c764-a696-4ce4-95fa-58c782e0b71f\" (UID: \"2538c764-a696-4ce4-95fa-58c782e0b71f\") " Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.110716 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "2538c764-a696-4ce4-95fa-58c782e0b71f" (UID: "2538c764-a696-4ce4-95fa-58c782e0b71f"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.110730 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "2538c764-a696-4ce4-95fa-58c782e0b71f" (UID: "2538c764-a696-4ce4-95fa-58c782e0b71f"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.110754 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "2538c764-a696-4ce4-95fa-58c782e0b71f" (UID: "2538c764-a696-4ce4-95fa-58c782e0b71f"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.111241 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2538c764-a696-4ce4-95fa-58c782e0b71f" (UID: "2538c764-a696-4ce4-95fa-58c782e0b71f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.115027 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2538c764-a696-4ce4-95fa-58c782e0b71f-kube-api-access-qwbhw" (OuterVolumeSpecName: "kube-api-access-qwbhw") pod "2538c764-a696-4ce4-95fa-58c782e0b71f" (UID: "2538c764-a696-4ce4-95fa-58c782e0b71f"). InnerVolumeSpecName "kube-api-access-qwbhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.120984 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "mysql-db") pod "2538c764-a696-4ce4-95fa-58c782e0b71f" (UID: "2538c764-a696-4ce4-95fa-58c782e0b71f"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 04 10:03:08 crc kubenswrapper[4707]: E1204 10:03:08.175658 4707 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 04 10:03:08 crc kubenswrapper[4707]: E1204 10:03:08.177114 4707 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 04 10:03:08 crc kubenswrapper[4707]: E1204 10:03:08.178329 4707 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 04 10:03:08 crc kubenswrapper[4707]: E1204 10:03:08.178403 4707 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="manila-kuttl-tests/openstack-galera-0" podUID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" containerName="galera" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.185473 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.210109 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.210139 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.210150 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwbhw\" (UniqueName: \"kubernetes.io/projected/2538c764-a696-4ce4-95fa-58c782e0b71f-kube-api-access-qwbhw\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.210158 4707 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.210166 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2538c764-a696-4ce4-95fa-58c782e0b71f-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.210192 4707 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.221995 4707 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.310969 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts\") pod \"a5838998-f07f-429c-916c-6ad39edd46cd\" (UID: \"a5838998-f07f-429c-916c-6ad39edd46cd\") " Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.311121 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5bwj\" (UniqueName: \"kubernetes.io/projected/a5838998-f07f-429c-916c-6ad39edd46cd-kube-api-access-c5bwj\") pod \"a5838998-f07f-429c-916c-6ad39edd46cd\" (UID: \"a5838998-f07f-429c-916c-6ad39edd46cd\") " Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.311485 4707 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.311949 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a5838998-f07f-429c-916c-6ad39edd46cd" (UID: "a5838998-f07f-429c-916c-6ad39edd46cd"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.314013 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5838998-f07f-429c-916c-6ad39edd46cd-kube-api-access-c5bwj" (OuterVolumeSpecName: "kube-api-access-c5bwj") pod "a5838998-f07f-429c-916c-6ad39edd46cd" (UID: "a5838998-f07f-429c-916c-6ad39edd46cd"). InnerVolumeSpecName "kube-api-access-c5bwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.413645 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5bwj\" (UniqueName: \"kubernetes.io/projected/a5838998-f07f-429c-916c-6ad39edd46cd-kube-api-access-c5bwj\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.414453 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a5838998-f07f-429c-916c-6ad39edd46cd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.420361 4707 generic.go:334] "Generic (PLEG): container finished" podID="2538c764-a696-4ce4-95fa-58c782e0b71f" containerID="4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96" exitCode=0 Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.420435 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"2538c764-a696-4ce4-95fa-58c782e0b71f","Type":"ContainerDied","Data":"4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96"} Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.420464 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"2538c764-a696-4ce4-95fa-58c782e0b71f","Type":"ContainerDied","Data":"a8d628b48c7a97e6a71fa0bded88744fe494742b8ed739c66e73424b4529e7cb"} Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.420482 4707 scope.go:117] "RemoveContainer" containerID="4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.420595 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-1" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.423023 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" event={"ID":"a5838998-f07f-429c-916c-6ad39edd46cd","Type":"ContainerDied","Data":"c30dfd932e004baf579978ac88ccbe3ceefad08d25e7038c6d725991bcd69821"} Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.423165 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone3277-account-delete-bgx8f" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.440488 4707 scope.go:117] "RemoveContainer" containerID="e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.458189 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone3277-account-delete-bgx8f"] Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.467982 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone3277-account-delete-bgx8f"] Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.469514 4707 scope.go:117] "RemoveContainer" containerID="4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96" Dec 04 10:03:08 crc kubenswrapper[4707]: E1204 10:03:08.469994 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96\": container with ID starting with 4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96 not found: ID does not exist" containerID="4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.470032 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96"} err="failed to get container status \"4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96\": rpc error: code = NotFound desc = could not find container \"4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96\": container with ID starting with 4d5787839e341a844c199843a3505da5ae9f7972cd12ca9fb557c6dfbe113e96 not found: ID does not exist" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.470054 4707 scope.go:117] "RemoveContainer" containerID="e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e" Dec 04 10:03:08 crc kubenswrapper[4707]: E1204 10:03:08.470477 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e\": container with ID starting with e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e not found: ID does not exist" containerID="e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.470503 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e"} err="failed to get container status \"e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e\": rpc error: code = NotFound desc = could not find container \"e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e\": container with ID starting with e715ffc246aa52628ebf660d05fa28926e8b60cd6634627f6a89f31b9187ae8e not found: ID does not exist" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.470523 4707 scope.go:117] "RemoveContainer" containerID="96acdea9d1adf226492841fd35c730bd6c7be4592a253706df5ab8955a2d3c72" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.472799 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-1"] Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.476983 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["manila-kuttl-tests/openstack-galera-1"] Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.822677 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-0" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.852495 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2538c764-a696-4ce4-95fa-58c782e0b71f" path="/var/lib/kubelet/pods/2538c764-a696-4ce4-95fa-58c782e0b71f/volumes" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.853260 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51dd2aae-c620-4d95-b261-1cb6065096e3" path="/var/lib/kubelet/pods/51dd2aae-c620-4d95-b261-1cb6065096e3/volumes" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.853933 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59418610-f02f-4d29-b520-6fccfadad63e" path="/var/lib/kubelet/pods/59418610-f02f-4d29-b520-6fccfadad63e/volumes" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.855178 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7506a3de-2989-488c-8f6d-f3c566d0c682" path="/var/lib/kubelet/pods/7506a3de-2989-488c-8f6d-f3c566d0c682/volumes" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.855826 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5838998-f07f-429c-916c-6ad39edd46cd" path="/var/lib/kubelet/pods/a5838998-f07f-429c-916c-6ad39edd46cd/volumes" Dec 04 10:03:08 crc kubenswrapper[4707]: I1204 10:03:08.856410 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b38182e9-ec12-42a7-b506-83ba39b9042c" path="/var/lib/kubelet/pods/b38182e9-ec12-42a7-b506-83ba39b9042c/volumes" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.021058 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-operator-scripts\") pod \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.021118 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.021147 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfr9n\" (UniqueName: \"kubernetes.io/projected/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kube-api-access-bfr9n\") pod \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.021180 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-generated\") pod \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.021237 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kolla-config\") pod \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 
10:03:09.021320 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-default\") pod \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\" (UID: \"79cd0cfb-7c57-4a38-97c7-a40a24097d29\") " Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.021760 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "79cd0cfb-7c57-4a38-97c7-a40a24097d29" (UID: "79cd0cfb-7c57-4a38-97c7-a40a24097d29"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.021982 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "79cd0cfb-7c57-4a38-97c7-a40a24097d29" (UID: "79cd0cfb-7c57-4a38-97c7-a40a24097d29"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.022138 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "79cd0cfb-7c57-4a38-97c7-a40a24097d29" (UID: "79cd0cfb-7c57-4a38-97c7-a40a24097d29"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.022252 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "79cd0cfb-7c57-4a38-97c7-a40a24097d29" (UID: "79cd0cfb-7c57-4a38-97c7-a40a24097d29"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.022292 4707 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.022342 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.022356 4707 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.025616 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kube-api-access-bfr9n" (OuterVolumeSpecName: "kube-api-access-bfr9n") pod "79cd0cfb-7c57-4a38-97c7-a40a24097d29" (UID: "79cd0cfb-7c57-4a38-97c7-a40a24097d29"). InnerVolumeSpecName "kube-api-access-bfr9n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.029457 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "mysql-db") pod "79cd0cfb-7c57-4a38-97c7-a40a24097d29" (UID: "79cd0cfb-7c57-4a38-97c7-a40a24097d29"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.124392 4707 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.124442 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfr9n\" (UniqueName: \"kubernetes.io/projected/79cd0cfb-7c57-4a38-97c7-a40a24097d29-kube-api-access-bfr9n\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.124458 4707 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79cd0cfb-7c57-4a38-97c7-a40a24097d29-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.140631 4707 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.225089 4707 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.438132 4707 generic.go:334] "Generic (PLEG): container finished" podID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" containerID="940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b" exitCode=0 Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.438280 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"79cd0cfb-7c57-4a38-97c7-a40a24097d29","Type":"ContainerDied","Data":"940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b"} Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.439221 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"79cd0cfb-7c57-4a38-97c7-a40a24097d29","Type":"ContainerDied","Data":"6cdf2aade72c69e6e1657f00835a392b30a87806a2a319286546fb49321a6b36"} Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.438399 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/openstack-galera-0" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.439296 4707 scope.go:117] "RemoveContainer" containerID="940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.456976 4707 scope.go:117] "RemoveContainer" containerID="044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.473497 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-0"] Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.477759 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/openstack-galera-0"] Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.482666 4707 scope.go:117] "RemoveContainer" containerID="940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b" Dec 04 10:03:09 crc kubenswrapper[4707]: E1204 10:03:09.483087 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b\": container with ID starting with 940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b not found: ID does not exist" containerID="940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.483137 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b"} err="failed to get container status \"940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b\": rpc error: code = NotFound desc = could not find container \"940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b\": container with ID starting with 940a266485d9df60cc91fe08a8a00e059d08c3cb7e65831a127c43de06a77c4b not found: ID does not exist" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.483166 4707 scope.go:117] "RemoveContainer" containerID="044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413" Dec 04 10:03:09 crc kubenswrapper[4707]: E1204 10:03:09.483469 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413\": container with ID starting with 044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413 not found: ID does not exist" containerID="044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413" Dec 04 10:03:09 crc kubenswrapper[4707]: I1204 10:03:09.483506 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413"} err="failed to get container status \"044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413\": rpc error: code = NotFound desc = could not find container \"044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413\": container with ID starting with 044de3d4eeb2e204f2c09cf4cbb3cc43af3698151054feac19359a22b288f413 not found: ID does not exist" Dec 04 10:03:10 crc kubenswrapper[4707]: I1204 10:03:10.858208 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" path="/var/lib/kubelet/pods/79cd0cfb-7c57-4a38-97c7-a40a24097d29/volumes" Dec 04 10:03:21 crc kubenswrapper[4707]: I1204 10:03:21.892316 4707 
scope.go:117] "RemoveContainer" containerID="9375f91b80d047dffc37d4a8d9986d6c47895fd272438cb3c2eae279d3cc3e93" Dec 04 10:03:21 crc kubenswrapper[4707]: I1204 10:03:21.938744 4707 scope.go:117] "RemoveContainer" containerID="24d45e78e1e066edfd4d4e2a448c7b535d8b056b02ae1bcbc750910271d2934d" Dec 04 10:03:21 crc kubenswrapper[4707]: I1204 10:03:21.965323 4707 scope.go:117] "RemoveContainer" containerID="a3683d7f45d85789b8bb6a9cd1952f8450f08efb65c0b043d690541db88ff7f5" Dec 04 10:03:21 crc kubenswrapper[4707]: I1204 10:03:21.985266 4707 scope.go:117] "RemoveContainer" containerID="6ab8fa43ecee348c0d9630bf76404b132a3b945ccf9bb54c3cffd1b2939e95b4" Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.512378 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.536769 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9m85t\" (UniqueName: \"kubernetes.io/projected/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-kube-api-access-9m85t\") pod \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\" (UID: \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.536863 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-job-config-data\") pod \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\" (UID: \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.536888 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-config-data\") pod \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\" (UID: \"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce\") " Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.543911 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce" (UID: "f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.543994 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-kube-api-access-9m85t" (OuterVolumeSpecName: "kube-api-access-9m85t") pod "f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce" (UID: "f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce"). InnerVolumeSpecName "kube-api-access-9m85t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.547071 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-config-data" (OuterVolumeSpecName: "config-data") pod "f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce" (UID: "f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.549798 4707 generic.go:334] "Generic (PLEG): container finished" podID="f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce" containerID="7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd" exitCode=137 Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.549845 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" event={"ID":"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce","Type":"ContainerDied","Data":"7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd"} Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.549878 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" event={"ID":"f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce","Type":"ContainerDied","Data":"ebf2c452d49636e565706dea8acfa3240a0edc8a4caeb559af3a7ff6b234a51a"} Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.549899 4707 scope.go:117] "RemoveContainer" containerID="7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd" Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.550038 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx" Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.603681 4707 scope.go:117] "RemoveContainer" containerID="7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd" Dec 04 10:03:25 crc kubenswrapper[4707]: E1204 10:03:25.604266 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd\": container with ID starting with 7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd not found: ID does not exist" containerID="7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd" Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.604297 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd"} err="failed to get container status \"7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd\": rpc error: code = NotFound desc = could not find container \"7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd\": container with ID starting with 7aeec5f4629f22d5df89a3ec8527b7358a36700ffe1267cf8fe91f8386b04bdd not found: ID does not exist" Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.606915 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx"] Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.610826 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-lpdjx"] Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.637766 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9m85t\" (UniqueName: \"kubernetes.io/projected/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-kube-api-access-9m85t\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 10:03:25.637811 4707 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-job-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:25 crc kubenswrapper[4707]: I1204 
10:03:25.637827 4707 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:26 crc kubenswrapper[4707]: I1204 10:03:26.852958 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce" path="/var/lib/kubelet/pods/f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce/volumes" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.599474 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/ceph" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.623787 4707 generic.go:334] "Generic (PLEG): container finished" podID="ae5166d7-2ccb-4e29-8066-7b355eb947cc" containerID="eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa" exitCode=137 Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.623835 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/ceph" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.623831 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/ceph" event={"ID":"ae5166d7-2ccb-4e29-8066-7b355eb947cc","Type":"ContainerDied","Data":"eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa"} Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.623927 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/ceph" event={"ID":"ae5166d7-2ccb-4e29-8066-7b355eb947cc","Type":"ContainerDied","Data":"2dda0991b27aa38dae5fc8c01e5d78024648c94236ffaa22bf82d5f34018df9a"} Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.623944 4707 scope.go:117] "RemoveContainer" containerID="eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.637489 4707 scope.go:117] "RemoveContainer" containerID="eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa" Dec 04 10:03:36 crc kubenswrapper[4707]: E1204 10:03:36.637886 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa\": container with ID starting with eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa not found: ID does not exist" containerID="eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.637915 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa"} err="failed to get container status \"eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa\": rpc error: code = NotFound desc = could not find container \"eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa\": container with ID starting with eae13d613b80c0afda601e087c0158271a01da5d49b078a2d0f91b27aeda12aa not found: ID does not exist" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.768179 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-data\") pod \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.768372 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-log\") pod \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.768413 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpj7t\" (UniqueName: \"kubernetes.io/projected/ae5166d7-2ccb-4e29-8066-7b355eb947cc-kube-api-access-xpj7t\") pod \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.768453 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-run\") pod \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\" (UID: \"ae5166d7-2ccb-4e29-8066-7b355eb947cc\") " Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.769263 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-run" (OuterVolumeSpecName: "run") pod "ae5166d7-2ccb-4e29-8066-7b355eb947cc" (UID: "ae5166d7-2ccb-4e29-8066-7b355eb947cc"). InnerVolumeSpecName "run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.769325 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-log" (OuterVolumeSpecName: "log") pod "ae5166d7-2ccb-4e29-8066-7b355eb947cc" (UID: "ae5166d7-2ccb-4e29-8066-7b355eb947cc"). InnerVolumeSpecName "log". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.773253 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae5166d7-2ccb-4e29-8066-7b355eb947cc-kube-api-access-xpj7t" (OuterVolumeSpecName: "kube-api-access-xpj7t") pod "ae5166d7-2ccb-4e29-8066-7b355eb947cc" (UID: "ae5166d7-2ccb-4e29-8066-7b355eb947cc"). InnerVolumeSpecName "kube-api-access-xpj7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.774199 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-data" (OuterVolumeSpecName: "data") pod "ae5166d7-2ccb-4e29-8066-7b355eb947cc" (UID: "ae5166d7-2ccb-4e29-8066-7b355eb947cc"). InnerVolumeSpecName "data". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.869869 4707 reconciler_common.go:293] "Volume detached for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-log\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.869915 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpj7t\" (UniqueName: \"kubernetes.io/projected/ae5166d7-2ccb-4e29-8066-7b355eb947cc-kube-api-access-xpj7t\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.869935 4707 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-run\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.869950 4707 reconciler_common.go:293] "Volume detached for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/ae5166d7-2ccb-4e29-8066-7b355eb947cc-data\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.940173 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/ceph"] Dec 04 10:03:36 crc kubenswrapper[4707]: I1204 10:03:36.945118 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/ceph"] Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.095879 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-68759b947-999sr"] Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.096437 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" podUID="6e85dcf5-11e1-48ad-b884-404af35dd76a" containerName="manager" containerID="cri-o://aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24" gracePeriod=10 Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.397751 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-8xfpv"] Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.398219 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-8xfpv" podUID="1faad1a1-281e-4341-8bef-0a5b6c8051e4" containerName="registry-server" containerID="cri-o://df2830c5c10ccd2e82c473f0e278f66af891755ea78df7a2e18f3b8b3e6d000e" gracePeriod=30 Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.443223 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527"] Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.454611 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a12188527"] Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.538038 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.642896 4707 generic.go:334] "Generic (PLEG): container finished" podID="6e85dcf5-11e1-48ad-b884-404af35dd76a" containerID="aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24" exitCode=0 Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.642955 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" event={"ID":"6e85dcf5-11e1-48ad-b884-404af35dd76a","Type":"ContainerDied","Data":"aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24"} Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.642986 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" event={"ID":"6e85dcf5-11e1-48ad-b884-404af35dd76a","Type":"ContainerDied","Data":"71d2e96852e2d280458063073c8e72c5a79ae223dd3ca5d10b7a94944efc0c52"} Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.643002 4707 scope.go:117] "RemoveContainer" containerID="aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.643090 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-68759b947-999sr" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.646881 4707 generic.go:334] "Generic (PLEG): container finished" podID="1faad1a1-281e-4341-8bef-0a5b6c8051e4" containerID="df2830c5c10ccd2e82c473f0e278f66af891755ea78df7a2e18f3b8b3e6d000e" exitCode=0 Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.646915 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-8xfpv" event={"ID":"1faad1a1-281e-4341-8bef-0a5b6c8051e4","Type":"ContainerDied","Data":"df2830c5c10ccd2e82c473f0e278f66af891755ea78df7a2e18f3b8b3e6d000e"} Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.672850 4707 scope.go:117] "RemoveContainer" containerID="aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24" Dec 04 10:03:38 crc kubenswrapper[4707]: E1204 10:03:38.674305 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24\": container with ID starting with aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24 not found: ID does not exist" containerID="aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.674354 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24"} err="failed to get container status \"aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24\": rpc error: code = NotFound desc = could not find container \"aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24\": container with ID starting with aaee4919027abbabb97937db3e4d69c1bcafdf4c19291841d8bb39dcc9c33d24 not found: ID does not exist" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.694771 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-webhook-cert\") pod 
\"6e85dcf5-11e1-48ad-b884-404af35dd76a\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.694918 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-apiservice-cert\") pod \"6e85dcf5-11e1-48ad-b884-404af35dd76a\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.694988 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7p94\" (UniqueName: \"kubernetes.io/projected/6e85dcf5-11e1-48ad-b884-404af35dd76a-kube-api-access-b7p94\") pod \"6e85dcf5-11e1-48ad-b884-404af35dd76a\" (UID: \"6e85dcf5-11e1-48ad-b884-404af35dd76a\") " Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.701836 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "6e85dcf5-11e1-48ad-b884-404af35dd76a" (UID: "6e85dcf5-11e1-48ad-b884-404af35dd76a"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.702121 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "6e85dcf5-11e1-48ad-b884-404af35dd76a" (UID: "6e85dcf5-11e1-48ad-b884-404af35dd76a"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.716198 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e85dcf5-11e1-48ad-b884-404af35dd76a-kube-api-access-b7p94" (OuterVolumeSpecName: "kube-api-access-b7p94") pod "6e85dcf5-11e1-48ad-b884-404af35dd76a" (UID: "6e85dcf5-11e1-48ad-b884-404af35dd76a"). InnerVolumeSpecName "kube-api-access-b7p94". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.757805 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-8xfpv" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.795501 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kssh\" (UniqueName: \"kubernetes.io/projected/1faad1a1-281e-4341-8bef-0a5b6c8051e4-kube-api-access-6kssh\") pod \"1faad1a1-281e-4341-8bef-0a5b6c8051e4\" (UID: \"1faad1a1-281e-4341-8bef-0a5b6c8051e4\") " Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.795794 4707 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.795812 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7p94\" (UniqueName: \"kubernetes.io/projected/6e85dcf5-11e1-48ad-b884-404af35dd76a-kube-api-access-b7p94\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.795828 4707 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e85dcf5-11e1-48ad-b884-404af35dd76a-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.799550 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1faad1a1-281e-4341-8bef-0a5b6c8051e4-kube-api-access-6kssh" (OuterVolumeSpecName: "kube-api-access-6kssh") pod "1faad1a1-281e-4341-8bef-0a5b6c8051e4" (UID: "1faad1a1-281e-4341-8bef-0a5b6c8051e4"). InnerVolumeSpecName "kube-api-access-6kssh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.854090 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="437bf12b-06d2-4fed-a6ff-c5b65eea01fe" path="/var/lib/kubelet/pods/437bf12b-06d2-4fed-a6ff-c5b65eea01fe/volumes" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.856108 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae5166d7-2ccb-4e29-8066-7b355eb947cc" path="/var/lib/kubelet/pods/ae5166d7-2ccb-4e29-8066-7b355eb947cc/volumes" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.896408 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kssh\" (UniqueName: \"kubernetes.io/projected/1faad1a1-281e-4341-8bef-0a5b6c8051e4-kube-api-access-6kssh\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.961456 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-68759b947-999sr"] Dec 04 10:03:38 crc kubenswrapper[4707]: I1204 10:03:38.965156 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-68759b947-999sr"] Dec 04 10:03:39 crc kubenswrapper[4707]: I1204 10:03:39.730954 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-8xfpv" Dec 04 10:03:39 crc kubenswrapper[4707]: I1204 10:03:39.730943 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-8xfpv" event={"ID":"1faad1a1-281e-4341-8bef-0a5b6c8051e4","Type":"ContainerDied","Data":"247031a37e85137d006b33f0a16a36e0c2638c89b591130813be302ebd45a153"} Dec 04 10:03:39 crc kubenswrapper[4707]: I1204 10:03:39.731601 4707 scope.go:117] "RemoveContainer" containerID="df2830c5c10ccd2e82c473f0e278f66af891755ea78df7a2e18f3b8b3e6d000e" Dec 04 10:03:39 crc kubenswrapper[4707]: I1204 10:03:39.752000 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-8xfpv"] Dec 04 10:03:39 crc kubenswrapper[4707]: I1204 10:03:39.758149 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-8xfpv"] Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.489896 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb"] Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.490121 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" podUID="ff3cb35d-0e1e-41a5-ba41-efc60015c860" containerName="operator" containerID="cri-o://1a85e44377fe4e060de58e0bf9c07bb8b45437d23f2159373edb6007b7d46edf" gracePeriod=10 Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.743614 4707 generic.go:334] "Generic (PLEG): container finished" podID="ff3cb35d-0e1e-41a5-ba41-efc60015c860" containerID="1a85e44377fe4e060de58e0bf9c07bb8b45437d23f2159373edb6007b7d46edf" exitCode=0 Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.743668 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" event={"ID":"ff3cb35d-0e1e-41a5-ba41-efc60015c860","Type":"ContainerDied","Data":"1a85e44377fe4e060de58e0bf9c07bb8b45437d23f2159373edb6007b7d46edf"} Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.779565 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-prnzl"] Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.779793 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" podUID="07f17250-36c1-4129-aa10-2e5c81fed559" containerName="registry-server" containerID="cri-o://0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01" gracePeriod=30 Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.814226 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9"] Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.821130 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590hckq9"] Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.857628 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1faad1a1-281e-4341-8bef-0a5b6c8051e4" path="/var/lib/kubelet/pods/1faad1a1-281e-4341-8bef-0a5b6c8051e4/volumes" Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.858243 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50e8161a-5e0d-4468-a1b8-de0e9d48bcee" 
path="/var/lib/kubelet/pods/50e8161a-5e0d-4468-a1b8-de0e9d48bcee/volumes" Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.858947 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e85dcf5-11e1-48ad-b884-404af35dd76a" path="/var/lib/kubelet/pods/6e85dcf5-11e1-48ad-b884-404af35dd76a/volumes" Dec 04 10:03:40 crc kubenswrapper[4707]: I1204 10:03:40.893063 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.021949 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhrdp\" (UniqueName: \"kubernetes.io/projected/ff3cb35d-0e1e-41a5-ba41-efc60015c860-kube-api-access-bhrdp\") pod \"ff3cb35d-0e1e-41a5-ba41-efc60015c860\" (UID: \"ff3cb35d-0e1e-41a5-ba41-efc60015c860\") " Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.044324 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff3cb35d-0e1e-41a5-ba41-efc60015c860-kube-api-access-bhrdp" (OuterVolumeSpecName: "kube-api-access-bhrdp") pod "ff3cb35d-0e1e-41a5-ba41-efc60015c860" (UID: "ff3cb35d-0e1e-41a5-ba41-efc60015c860"). InnerVolumeSpecName "kube-api-access-bhrdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.123687 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhrdp\" (UniqueName: \"kubernetes.io/projected/ff3cb35d-0e1e-41a5-ba41-efc60015c860-kube-api-access-bhrdp\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.169135 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.325614 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h76wc\" (UniqueName: \"kubernetes.io/projected/07f17250-36c1-4129-aa10-2e5c81fed559-kube-api-access-h76wc\") pod \"07f17250-36c1-4129-aa10-2e5c81fed559\" (UID: \"07f17250-36c1-4129-aa10-2e5c81fed559\") " Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.329529 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07f17250-36c1-4129-aa10-2e5c81fed559-kube-api-access-h76wc" (OuterVolumeSpecName: "kube-api-access-h76wc") pod "07f17250-36c1-4129-aa10-2e5c81fed559" (UID: "07f17250-36c1-4129-aa10-2e5c81fed559"). InnerVolumeSpecName "kube-api-access-h76wc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.427087 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h76wc\" (UniqueName: \"kubernetes.io/projected/07f17250-36c1-4129-aa10-2e5c81fed559-kube-api-access-h76wc\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.753735 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" event={"ID":"ff3cb35d-0e1e-41a5-ba41-efc60015c860","Type":"ContainerDied","Data":"bf646547734b53b1a6b6611d91f3be15144a3b997a2de817afd9ea9125980bc8"} Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.753761 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.753803 4707 scope.go:117] "RemoveContainer" containerID="1a85e44377fe4e060de58e0bf9c07bb8b45437d23f2159373edb6007b7d46edf" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.755719 4707 generic.go:334] "Generic (PLEG): container finished" podID="07f17250-36c1-4129-aa10-2e5c81fed559" containerID="0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01" exitCode=0 Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.755759 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" event={"ID":"07f17250-36c1-4129-aa10-2e5c81fed559","Type":"ContainerDied","Data":"0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01"} Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.755788 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" event={"ID":"07f17250-36c1-4129-aa10-2e5c81fed559","Type":"ContainerDied","Data":"063610d2e528440c60e5e0fe28e20c02e1db9a441c4d972befd616877ccd7bdf"} Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.755859 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-prnzl" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.771386 4707 scope.go:117] "RemoveContainer" containerID="0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.783945 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb"] Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.790854 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-twnpb"] Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.792043 4707 scope.go:117] "RemoveContainer" containerID="0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01" Dec 04 10:03:41 crc kubenswrapper[4707]: E1204 10:03:41.794549 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01\": container with ID starting with 0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01 not found: ID does not exist" containerID="0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.794601 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01"} err="failed to get container status \"0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01\": rpc error: code = NotFound desc = could not find container \"0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01\": container with ID starting with 0421f57b6a4206bec1496b9cb447f77261e0d34970a6aa26e2c801a61c87bc01 not found: ID does not exist" Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.798191 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-prnzl"] Dec 04 10:03:41 crc kubenswrapper[4707]: I1204 10:03:41.804388 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-operators/rabbitmq-cluster-operator-index-prnzl"] Dec 04 10:03:42 crc kubenswrapper[4707]: I1204 10:03:42.851571 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07f17250-36c1-4129-aa10-2e5c81fed559" path="/var/lib/kubelet/pods/07f17250-36c1-4129-aa10-2e5c81fed559/volumes" Dec 04 10:03:42 crc kubenswrapper[4707]: I1204 10:03:42.852574 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff3cb35d-0e1e-41a5-ba41-efc60015c860" path="/var/lib/kubelet/pods/ff3cb35d-0e1e-41a5-ba41-efc60015c860/volumes" Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.433812 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc"] Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.434720 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" podUID="dee882a8-abdc-45cb-b451-121bd9579e0f" containerName="manager" containerID="cri-o://c8bbf20d55964d3101a974ead8aed6aab528464e5701ea73c2d46c21aa7b740b" gracePeriod=10 Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.434806 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" podUID="dee882a8-abdc-45cb-b451-121bd9579e0f" containerName="kube-rbac-proxy" containerID="cri-o://1439692c614e0a37a6ece66472d71a5abd2d940577fd6cab4fd5836cfd138ba6" gracePeriod=10 Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.755047 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-nd5wt"] Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.755608 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-nd5wt" podUID="fca8013f-27a6-4450-93d8-8a5949d66b59" containerName="registry-server" containerID="cri-o://c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2" gracePeriod=30 Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.809924 4707 generic.go:334] "Generic (PLEG): container finished" podID="dee882a8-abdc-45cb-b451-121bd9579e0f" containerID="1439692c614e0a37a6ece66472d71a5abd2d940577fd6cab4fd5836cfd138ba6" exitCode=0 Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.809965 4707 generic.go:334] "Generic (PLEG): container finished" podID="dee882a8-abdc-45cb-b451-121bd9579e0f" containerID="c8bbf20d55964d3101a974ead8aed6aab528464e5701ea73c2d46c21aa7b740b" exitCode=0 Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.809990 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" event={"ID":"dee882a8-abdc-45cb-b451-121bd9579e0f","Type":"ContainerDied","Data":"1439692c614e0a37a6ece66472d71a5abd2d940577fd6cab4fd5836cfd138ba6"} Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.810017 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" event={"ID":"dee882a8-abdc-45cb-b451-121bd9579e0f","Type":"ContainerDied","Data":"c8bbf20d55964d3101a974ead8aed6aab528464e5701ea73c2d46c21aa7b740b"} Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.819110 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58"] Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 
10:03:44.831474 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dvcp58"] Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.852132 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="727d9472-f299-41de-83d8-3d3d73c669e2" path="/var/lib/kubelet/pods/727d9472-f299-41de-83d8-3d3d73c669e2/volumes" Dec 04 10:03:44 crc kubenswrapper[4707]: I1204 10:03:44.889838 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.074165 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-apiservice-cert\") pod \"dee882a8-abdc-45cb-b451-121bd9579e0f\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.074271 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfr2j\" (UniqueName: \"kubernetes.io/projected/dee882a8-abdc-45cb-b451-121bd9579e0f-kube-api-access-rfr2j\") pod \"dee882a8-abdc-45cb-b451-121bd9579e0f\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.074323 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-webhook-cert\") pod \"dee882a8-abdc-45cb-b451-121bd9579e0f\" (UID: \"dee882a8-abdc-45cb-b451-121bd9579e0f\") " Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.078859 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "dee882a8-abdc-45cb-b451-121bd9579e0f" (UID: "dee882a8-abdc-45cb-b451-121bd9579e0f"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.078970 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dee882a8-abdc-45cb-b451-121bd9579e0f-kube-api-access-rfr2j" (OuterVolumeSpecName: "kube-api-access-rfr2j") pod "dee882a8-abdc-45cb-b451-121bd9579e0f" (UID: "dee882a8-abdc-45cb-b451-121bd9579e0f"). InnerVolumeSpecName "kube-api-access-rfr2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.078999 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "dee882a8-abdc-45cb-b451-121bd9579e0f" (UID: "dee882a8-abdc-45cb-b451-121bd9579e0f"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.123004 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-nd5wt" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.175729 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfr2j\" (UniqueName: \"kubernetes.io/projected/dee882a8-abdc-45cb-b451-121bd9579e0f-kube-api-access-rfr2j\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.175762 4707 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.175772 4707 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dee882a8-abdc-45cb-b451-121bd9579e0f-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.277183 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28w9c\" (UniqueName: \"kubernetes.io/projected/fca8013f-27a6-4450-93d8-8a5949d66b59-kube-api-access-28w9c\") pod \"fca8013f-27a6-4450-93d8-8a5949d66b59\" (UID: \"fca8013f-27a6-4450-93d8-8a5949d66b59\") " Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.280858 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fca8013f-27a6-4450-93d8-8a5949d66b59-kube-api-access-28w9c" (OuterVolumeSpecName: "kube-api-access-28w9c") pod "fca8013f-27a6-4450-93d8-8a5949d66b59" (UID: "fca8013f-27a6-4450-93d8-8a5949d66b59"). InnerVolumeSpecName "kube-api-access-28w9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.377995 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28w9c\" (UniqueName: \"kubernetes.io/projected/fca8013f-27a6-4450-93d8-8a5949d66b59-kube-api-access-28w9c\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.767781 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l"] Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.768029 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" podUID="ee15d19c-9f87-47f6-b686-76e39c0477e9" containerName="manager" containerID="cri-o://df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4" gracePeriod=10 Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.817545 4707 generic.go:334] "Generic (PLEG): container finished" podID="fca8013f-27a6-4450-93d8-8a5949d66b59" containerID="c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2" exitCode=0 Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.817621 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-nd5wt" event={"ID":"fca8013f-27a6-4450-93d8-8a5949d66b59","Type":"ContainerDied","Data":"c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2"} Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.817649 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-nd5wt" event={"ID":"fca8013f-27a6-4450-93d8-8a5949d66b59","Type":"ContainerDied","Data":"004c46050ce40c6ea74adc25c8f8f1958e656deb918cfb3d36c99d33bd647a65"} Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.817664 4707 
scope.go:117] "RemoveContainer" containerID="c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.817781 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-nd5wt" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.821927 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" event={"ID":"dee882a8-abdc-45cb-b451-121bd9579e0f","Type":"ContainerDied","Data":"4ea83e6e3d4d9e45a69f585fa4ddb8ed8b12f3531c5d426ec389c9c24fe6292b"} Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.821976 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.875485 4707 scope.go:117] "RemoveContainer" containerID="c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2" Dec 04 10:03:45 crc kubenswrapper[4707]: E1204 10:03:45.876468 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2\": container with ID starting with c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2 not found: ID does not exist" containerID="c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.876548 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2"} err="failed to get container status \"c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2\": rpc error: code = NotFound desc = could not find container \"c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2\": container with ID starting with c7e35a219b0ff347918508de2189b83c79d89eb038129a7160bd891d474d4ac2 not found: ID does not exist" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.876582 4707 scope.go:117] "RemoveContainer" containerID="1439692c614e0a37a6ece66472d71a5abd2d940577fd6cab4fd5836cfd138ba6" Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.882557 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc"] Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.898382 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fcdc5dbf4-s5hkc"] Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.912061 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-nd5wt"] Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.915196 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-nd5wt"] Dec 04 10:03:45 crc kubenswrapper[4707]: I1204 10:03:45.923549 4707 scope.go:117] "RemoveContainer" containerID="c8bbf20d55964d3101a974ead8aed6aab528464e5701ea73c2d46c21aa7b740b" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.044143 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-rtkfp"] Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.044401 4707 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-operators/mariadb-operator-index-rtkfp" podUID="f096c710-2d6f-4a74-b437-6557a3d009a4" containerName="registry-server" containerID="cri-o://b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687" gracePeriod=30 Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.066217 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz"] Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.078070 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/55a3886cc1ed42812df4eab61c7a6033dc924d195539e8545c8f175f61tm2nz"] Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.203644 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.392578 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-apiservice-cert\") pod \"ee15d19c-9f87-47f6-b686-76e39c0477e9\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") " Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.392700 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-webhook-cert\") pod \"ee15d19c-9f87-47f6-b686-76e39c0477e9\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") " Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.393403 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7rm4\" (UniqueName: \"kubernetes.io/projected/ee15d19c-9f87-47f6-b686-76e39c0477e9-kube-api-access-q7rm4\") pod \"ee15d19c-9f87-47f6-b686-76e39c0477e9\" (UID: \"ee15d19c-9f87-47f6-b686-76e39c0477e9\") " Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.401251 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "ee15d19c-9f87-47f6-b686-76e39c0477e9" (UID: "ee15d19c-9f87-47f6-b686-76e39c0477e9"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.401290 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee15d19c-9f87-47f6-b686-76e39c0477e9-kube-api-access-q7rm4" (OuterVolumeSpecName: "kube-api-access-q7rm4") pod "ee15d19c-9f87-47f6-b686-76e39c0477e9" (UID: "ee15d19c-9f87-47f6-b686-76e39c0477e9"). InnerVolumeSpecName "kube-api-access-q7rm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.401272 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "ee15d19c-9f87-47f6-b686-76e39c0477e9" (UID: "ee15d19c-9f87-47f6-b686-76e39c0477e9"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.412531 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-rtkfp" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.493888 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwtcw\" (UniqueName: \"kubernetes.io/projected/f096c710-2d6f-4a74-b437-6557a3d009a4-kube-api-access-wwtcw\") pod \"f096c710-2d6f-4a74-b437-6557a3d009a4\" (UID: \"f096c710-2d6f-4a74-b437-6557a3d009a4\") " Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.494097 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7rm4\" (UniqueName: \"kubernetes.io/projected/ee15d19c-9f87-47f6-b686-76e39c0477e9-kube-api-access-q7rm4\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.494109 4707 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.494118 4707 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ee15d19c-9f87-47f6-b686-76e39c0477e9-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.496322 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f096c710-2d6f-4a74-b437-6557a3d009a4-kube-api-access-wwtcw" (OuterVolumeSpecName: "kube-api-access-wwtcw") pod "f096c710-2d6f-4a74-b437-6557a3d009a4" (UID: "f096c710-2d6f-4a74-b437-6557a3d009a4"). InnerVolumeSpecName "kube-api-access-wwtcw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.595211 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwtcw\" (UniqueName: \"kubernetes.io/projected/f096c710-2d6f-4a74-b437-6557a3d009a4-kube-api-access-wwtcw\") on node \"crc\" DevicePath \"\"" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.828235 4707 generic.go:334] "Generic (PLEG): container finished" podID="ee15d19c-9f87-47f6-b686-76e39c0477e9" containerID="df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4" exitCode=0 Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.828277 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.828274 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" event={"ID":"ee15d19c-9f87-47f6-b686-76e39c0477e9","Type":"ContainerDied","Data":"df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4"} Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.828354 4707 scope.go:117] "RemoveContainer" containerID="df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.828365 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l" event={"ID":"ee15d19c-9f87-47f6-b686-76e39c0477e9","Type":"ContainerDied","Data":"cdb02daaefb2a4383aae865758c2cebd7eeac72eccee9fabfc63289744d4361a"} Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.831980 4707 generic.go:334] "Generic (PLEG): container finished" podID="f096c710-2d6f-4a74-b437-6557a3d009a4" containerID="b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687" exitCode=0 Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.832048 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-rtkfp" event={"ID":"f096c710-2d6f-4a74-b437-6557a3d009a4","Type":"ContainerDied","Data":"b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687"} Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.832072 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-rtkfp" event={"ID":"f096c710-2d6f-4a74-b437-6557a3d009a4","Type":"ContainerDied","Data":"a4d9b8efe9aa4533e4c09c6c175b25d59e833e2c64c75fd1d3527f2f8aaf96a8"} Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.832077 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-rtkfp" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.848554 4707 scope.go:117] "RemoveContainer" containerID="df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4" Dec 04 10:03:46 crc kubenswrapper[4707]: E1204 10:03:46.849081 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4\": container with ID starting with df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4 not found: ID does not exist" containerID="df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.849121 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4"} err="failed to get container status \"df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4\": rpc error: code = NotFound desc = could not find container \"df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4\": container with ID starting with df9f610b9a8ff47d0b12d8aa4b382ebc92006b941cc83ad4d6dc39724c06dfb4 not found: ID does not exist" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.849149 4707 scope.go:117] "RemoveContainer" containerID="b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.859737 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="934d114a-0b4c-4bfe-aca3-5518bf105171" path="/var/lib/kubelet/pods/934d114a-0b4c-4bfe-aca3-5518bf105171/volumes" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.861151 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dee882a8-abdc-45cb-b451-121bd9579e0f" path="/var/lib/kubelet/pods/dee882a8-abdc-45cb-b451-121bd9579e0f/volumes" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.861763 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fca8013f-27a6-4450-93d8-8a5949d66b59" path="/var/lib/kubelet/pods/fca8013f-27a6-4450-93d8-8a5949d66b59/volumes" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.862275 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l"] Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.867235 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c4ddb68d5-kpz9l"] Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.876556 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-rtkfp"] Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.879579 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-rtkfp"] Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.882090 4707 scope.go:117] "RemoveContainer" containerID="b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687" Dec 04 10:03:46 crc kubenswrapper[4707]: E1204 10:03:46.882466 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687\": container with ID starting with b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687 not found: ID does not exist" 
containerID="b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687" Dec 04 10:03:46 crc kubenswrapper[4707]: I1204 10:03:46.882496 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687"} err="failed to get container status \"b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687\": rpc error: code = NotFound desc = could not find container \"b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687\": container with ID starting with b6ba2e5a99c8f0d6d22de7bdf7e4456dbc7c6faa1cc4a8edc8c4699df1ab0687 not found: ID does not exist" Dec 04 10:03:48 crc kubenswrapper[4707]: I1204 10:03:48.852834 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee15d19c-9f87-47f6-b686-76e39c0477e9" path="/var/lib/kubelet/pods/ee15d19c-9f87-47f6-b686-76e39c0477e9/volumes" Dec 04 10:03:48 crc kubenswrapper[4707]: I1204 10:03:48.854220 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f096c710-2d6f-4a74-b437-6557a3d009a4" path="/var/lib/kubelet/pods/f096c710-2d6f-4a74-b437-6557a3d009a4/volumes" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.614153 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jdrpr/must-gather-l8l4v"] Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.614888 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b373699-1303-4b1a-914d-7764376f5b38" containerName="mysql-bootstrap" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.614904 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b373699-1303-4b1a-914d-7764376f5b38" containerName="mysql-bootstrap" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.614911 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5838998-f07f-429c-916c-6ad39edd46cd" containerName="mariadb-account-delete" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.614917 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5838998-f07f-429c-916c-6ad39edd46cd" containerName="mariadb-account-delete" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.614927 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51dd2aae-c620-4d95-b261-1cb6065096e3" containerName="rabbitmq" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.614933 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="51dd2aae-c620-4d95-b261-1cb6065096e3" containerName="rabbitmq" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.614941 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5838998-f07f-429c-916c-6ad39edd46cd" containerName="mariadb-account-delete" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.614946 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5838998-f07f-429c-916c-6ad39edd46cd" containerName="mariadb-account-delete" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.614954 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f096c710-2d6f-4a74-b437-6557a3d009a4" containerName="registry-server" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.614960 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f096c710-2d6f-4a74-b437-6557a3d009a4" containerName="registry-server" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.614966 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51dd2aae-c620-4d95-b261-1cb6065096e3" containerName="setup-container" Dec 04 
10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.614972 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="51dd2aae-c620-4d95-b261-1cb6065096e3" containerName="setup-container" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.614982 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07f17250-36c1-4129-aa10-2e5c81fed559" containerName="registry-server" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.614989 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="07f17250-36c1-4129-aa10-2e5c81fed559" containerName="registry-server" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.614999 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b38182e9-ec12-42a7-b506-83ba39b9042c" containerName="memcached" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615005 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="b38182e9-ec12-42a7-b506-83ba39b9042c" containerName="memcached" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615015 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff3cb35d-0e1e-41a5-ba41-efc60015c860" containerName="operator" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615020 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff3cb35d-0e1e-41a5-ba41-efc60015c860" containerName="operator" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615030 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" containerName="mysql-bootstrap" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615036 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" containerName="mysql-bootstrap" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615042 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b373699-1303-4b1a-914d-7764376f5b38" containerName="galera" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615048 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b373699-1303-4b1a-914d-7764376f5b38" containerName="galera" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615055 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1faad1a1-281e-4341-8bef-0a5b6c8051e4" containerName="registry-server" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615060 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="1faad1a1-281e-4341-8bef-0a5b6c8051e4" containerName="registry-server" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615066 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2538c764-a696-4ce4-95fa-58c782e0b71f" containerName="mysql-bootstrap" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615071 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="2538c764-a696-4ce4-95fa-58c782e0b71f" containerName="mysql-bootstrap" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615079 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dee882a8-abdc-45cb-b451-121bd9579e0f" containerName="kube-rbac-proxy" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615085 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="dee882a8-abdc-45cb-b451-121bd9579e0f" containerName="kube-rbac-proxy" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615092 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca8013f-27a6-4450-93d8-8a5949d66b59" containerName="registry-server" Dec 04 10:04:12 crc 
kubenswrapper[4707]: I1204 10:04:12.615098 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca8013f-27a6-4450-93d8-8a5949d66b59" containerName="registry-server" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615109 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e85dcf5-11e1-48ad-b884-404af35dd76a" containerName="manager" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615114 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e85dcf5-11e1-48ad-b884-404af35dd76a" containerName="manager" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615120 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae5166d7-2ccb-4e29-8066-7b355eb947cc" containerName="ceph" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615128 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae5166d7-2ccb-4e29-8066-7b355eb947cc" containerName="ceph" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615136 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" containerName="galera" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615141 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" containerName="galera" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615151 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dee882a8-abdc-45cb-b451-121bd9579e0f" containerName="manager" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615158 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="dee882a8-abdc-45cb-b451-121bd9579e0f" containerName="manager" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615167 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37fbf63e-d62a-4ea7-b15c-c34c72aab829" containerName="keystone-api" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615173 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="37fbf63e-d62a-4ea7-b15c-c34c72aab829" containerName="keystone-api" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615184 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce" containerName="manila-service-cleanup-n5b5h655" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615189 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce" containerName="manila-service-cleanup-n5b5h655" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615197 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee15d19c-9f87-47f6-b686-76e39c0477e9" containerName="manager" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615202 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee15d19c-9f87-47f6-b686-76e39c0477e9" containerName="manager" Dec 04 10:04:12 crc kubenswrapper[4707]: E1204 10:04:12.615211 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2538c764-a696-4ce4-95fa-58c782e0b71f" containerName="galera" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615216 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="2538c764-a696-4ce4-95fa-58c782e0b71f" containerName="galera" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615304 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3486ad0-f43f-4bf0-bf5c-7fa81c0066ce" containerName="manila-service-cleanup-n5b5h655" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615314 4707 
memory_manager.go:354] "RemoveStaleState removing state" podUID="2538c764-a696-4ce4-95fa-58c782e0b71f" containerName="galera" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615320 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="b38182e9-ec12-42a7-b506-83ba39b9042c" containerName="memcached" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615331 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae5166d7-2ccb-4e29-8066-7b355eb947cc" containerName="ceph" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615342 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="fca8013f-27a6-4450-93d8-8a5949d66b59" containerName="registry-server" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615348 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f096c710-2d6f-4a74-b437-6557a3d009a4" containerName="registry-server" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615368 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="dee882a8-abdc-45cb-b451-121bd9579e0f" containerName="manager" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615376 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="07f17250-36c1-4129-aa10-2e5c81fed559" containerName="registry-server" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615382 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="dee882a8-abdc-45cb-b451-121bd9579e0f" containerName="kube-rbac-proxy" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615391 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e85dcf5-11e1-48ad-b884-404af35dd76a" containerName="manager" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615398 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="1faad1a1-281e-4341-8bef-0a5b6c8051e4" containerName="registry-server" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615406 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="51dd2aae-c620-4d95-b261-1cb6065096e3" containerName="rabbitmq" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615413 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff3cb35d-0e1e-41a5-ba41-efc60015c860" containerName="operator" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615420 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b373699-1303-4b1a-914d-7764376f5b38" containerName="galera" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615429 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5838998-f07f-429c-916c-6ad39edd46cd" containerName="mariadb-account-delete" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615437 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="79cd0cfb-7c57-4a38-97c7-a40a24097d29" containerName="galera" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615443 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5838998-f07f-429c-916c-6ad39edd46cd" containerName="mariadb-account-delete" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615464 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee15d19c-9f87-47f6-b686-76e39c0477e9" containerName="manager" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.615471 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="37fbf63e-d62a-4ea7-b15c-c34c72aab829" containerName="keystone-api" Dec 04 10:04:12 crc 
kubenswrapper[4707]: I1204 10:04:12.616175 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdrpr/must-gather-l8l4v" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.618707 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jdrpr"/"openshift-service-ca.crt" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.618928 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jdrpr"/"kube-root-ca.crt" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.631592 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jdrpr/must-gather-l8l4v"] Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.717371 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-must-gather-output\") pod \"must-gather-l8l4v\" (UID: \"7fd40b26-2ba1-4f8f-93ba-050266ad3fde\") " pod="openshift-must-gather-jdrpr/must-gather-l8l4v" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.717464 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lp85\" (UniqueName: \"kubernetes.io/projected/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-kube-api-access-9lp85\") pod \"must-gather-l8l4v\" (UID: \"7fd40b26-2ba1-4f8f-93ba-050266ad3fde\") " pod="openshift-must-gather-jdrpr/must-gather-l8l4v" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.819127 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-must-gather-output\") pod \"must-gather-l8l4v\" (UID: \"7fd40b26-2ba1-4f8f-93ba-050266ad3fde\") " pod="openshift-must-gather-jdrpr/must-gather-l8l4v" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.819210 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lp85\" (UniqueName: \"kubernetes.io/projected/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-kube-api-access-9lp85\") pod \"must-gather-l8l4v\" (UID: \"7fd40b26-2ba1-4f8f-93ba-050266ad3fde\") " pod="openshift-must-gather-jdrpr/must-gather-l8l4v" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.819645 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-must-gather-output\") pod \"must-gather-l8l4v\" (UID: \"7fd40b26-2ba1-4f8f-93ba-050266ad3fde\") " pod="openshift-must-gather-jdrpr/must-gather-l8l4v" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.839222 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lp85\" (UniqueName: \"kubernetes.io/projected/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-kube-api-access-9lp85\") pod \"must-gather-l8l4v\" (UID: \"7fd40b26-2ba1-4f8f-93ba-050266ad3fde\") " pod="openshift-must-gather-jdrpr/must-gather-l8l4v" Dec 04 10:04:12 crc kubenswrapper[4707]: I1204 10:04:12.933848 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jdrpr/must-gather-l8l4v" Dec 04 10:04:13 crc kubenswrapper[4707]: I1204 10:04:13.123886 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jdrpr/must-gather-l8l4v"] Dec 04 10:04:13 crc kubenswrapper[4707]: I1204 10:04:13.135467 4707 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 10:04:14 crc kubenswrapper[4707]: I1204 10:04:14.005297 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdrpr/must-gather-l8l4v" event={"ID":"7fd40b26-2ba1-4f8f-93ba-050266ad3fde","Type":"ContainerStarted","Data":"4545795fd2ac5cd427ce59d7168eeb612736990579f5c32f6dbac191aa1c6f48"} Dec 04 10:04:18 crc kubenswrapper[4707]: I1204 10:04:18.033533 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdrpr/must-gather-l8l4v" event={"ID":"7fd40b26-2ba1-4f8f-93ba-050266ad3fde","Type":"ContainerStarted","Data":"fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385"} Dec 04 10:04:18 crc kubenswrapper[4707]: I1204 10:04:18.034164 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdrpr/must-gather-l8l4v" event={"ID":"7fd40b26-2ba1-4f8f-93ba-050266ad3fde","Type":"ContainerStarted","Data":"6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d"} Dec 04 10:04:18 crc kubenswrapper[4707]: I1204 10:04:18.047906 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jdrpr/must-gather-l8l4v" podStartSLOduration=2.294573117 podStartE2EDuration="6.047890525s" podCreationTimestamp="2025-12-04 10:04:12 +0000 UTC" firstStartedPulling="2025-12-04 10:04:13.135426361 +0000 UTC m=+1552.571248868" lastFinishedPulling="2025-12-04 10:04:16.888743769 +0000 UTC m=+1556.324566276" observedRunningTime="2025-12-04 10:04:18.046731589 +0000 UTC m=+1557.482554156" watchObservedRunningTime="2025-12-04 10:04:18.047890525 +0000 UTC m=+1557.483713032" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.229522 4707 scope.go:117] "RemoveContainer" containerID="e02e254a788bc6a4e8e7c21dba78c9d4c0af4187f53ff757525a41306aa5d843" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.257264 4707 scope.go:117] "RemoveContainer" containerID="f457bb18a9a6958560e780e29f57edb2b9dc4864778e9bf00c426dd5094be93a" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.275824 4707 scope.go:117] "RemoveContainer" containerID="0300df74318d501be1b33daefb1051812504535f8956dc6b9a249f69f0ca529b" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.295357 4707 scope.go:117] "RemoveContainer" containerID="f2a062a845f6899105a1d1e8c1b827e95733dec4f29b0fd82090c587a47cac36" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.319438 4707 scope.go:117] "RemoveContainer" containerID="9d55cdc04fafa761d32f9ed44f241091b5c5bae804a28d5639a52bf91beaa44b" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.335037 4707 scope.go:117] "RemoveContainer" containerID="d7b3c47c22932b48c16ad0512e043b70a26c15533d71f03899ceee0d1114b255" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.359042 4707 scope.go:117] "RemoveContainer" containerID="15763f03267645202b4ca0e478c0c888d3a67cb74adfe0438694c8104554fab9" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.374164 4707 scope.go:117] "RemoveContainer" containerID="a8835b4f5add4ca685fde584dac75622ac4adef9095cc7a0118176dcc0878afe" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.390935 4707 scope.go:117] "RemoveContainer" 
containerID="eb308450cd911f4b7b823794d6516a3f6e2c12535207e9f3d7d1f11d0fbe407f" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.410977 4707 scope.go:117] "RemoveContainer" containerID="227627d594ad40441a26d1664b3bef384e48816c51cb19f672f02426568bb9aa" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.425635 4707 scope.go:117] "RemoveContainer" containerID="63fae04690d14548a142422581105f91635478ef809a563ca303196093ec0efb" Dec 04 10:04:22 crc kubenswrapper[4707]: I1204 10:04:22.443357 4707 scope.go:117] "RemoveContainer" containerID="fc6facd73b61b7e1a21d05c1bb0b4b5444785f59b0ddcde92bba3041bf76de0f" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.129471 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gcsvt"] Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.131079 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.143993 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcsvt"] Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.236256 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7knx\" (UniqueName: \"kubernetes.io/projected/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-kube-api-access-l7knx\") pod \"redhat-marketplace-gcsvt\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.236602 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-catalog-content\") pod \"redhat-marketplace-gcsvt\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.236726 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-utilities\") pod \"redhat-marketplace-gcsvt\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.337540 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7knx\" (UniqueName: \"kubernetes.io/projected/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-kube-api-access-l7knx\") pod \"redhat-marketplace-gcsvt\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.337803 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-catalog-content\") pod \"redhat-marketplace-gcsvt\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.337895 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-utilities\") pod \"redhat-marketplace-gcsvt\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " 
pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.338307 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-catalog-content\") pod \"redhat-marketplace-gcsvt\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.338566 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-utilities\") pod \"redhat-marketplace-gcsvt\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.359682 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7knx\" (UniqueName: \"kubernetes.io/projected/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-kube-api-access-l7knx\") pod \"redhat-marketplace-gcsvt\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.451797 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:45 crc kubenswrapper[4707]: I1204 10:04:45.907507 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcsvt"] Dec 04 10:04:46 crc kubenswrapper[4707]: I1204 10:04:46.189244 4707 generic.go:334] "Generic (PLEG): container finished" podID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" containerID="5495d328fd348ac506912d95badc58f8fb9506f7337310bb2eaf5af21974eae7" exitCode=0 Dec 04 10:04:46 crc kubenswrapper[4707]: I1204 10:04:46.189279 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcsvt" event={"ID":"af00f037-e6f1-4b2d-875f-9d5a0aad93a8","Type":"ContainerDied","Data":"5495d328fd348ac506912d95badc58f8fb9506f7337310bb2eaf5af21974eae7"} Dec 04 10:04:46 crc kubenswrapper[4707]: I1204 10:04:46.189303 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcsvt" event={"ID":"af00f037-e6f1-4b2d-875f-9d5a0aad93a8","Type":"ContainerStarted","Data":"a9d3a99b11d89de74f80e99930bbfcd2282fc9e00d2f664ebec990fd95c096d6"} Dec 04 10:04:47 crc kubenswrapper[4707]: I1204 10:04:47.196478 4707 generic.go:334] "Generic (PLEG): container finished" podID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" containerID="a1bb81a3adfc47f80d68703dacd1a2b36e843d937139c0fb455866f51bee0719" exitCode=0 Dec 04 10:04:47 crc kubenswrapper[4707]: I1204 10:04:47.196826 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcsvt" event={"ID":"af00f037-e6f1-4b2d-875f-9d5a0aad93a8","Type":"ContainerDied","Data":"a1bb81a3adfc47f80d68703dacd1a2b36e843d937139c0fb455866f51bee0719"} Dec 04 10:04:48 crc kubenswrapper[4707]: I1204 10:04:48.203576 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcsvt" event={"ID":"af00f037-e6f1-4b2d-875f-9d5a0aad93a8","Type":"ContainerStarted","Data":"360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b"} Dec 04 10:04:48 crc kubenswrapper[4707]: I1204 10:04:48.220603 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-gcsvt" podStartSLOduration=1.832278557 podStartE2EDuration="3.22058186s" podCreationTimestamp="2025-12-04 10:04:45 +0000 UTC" firstStartedPulling="2025-12-04 10:04:46.19139096 +0000 UTC m=+1585.627213467" lastFinishedPulling="2025-12-04 10:04:47.579694263 +0000 UTC m=+1587.015516770" observedRunningTime="2025-12-04 10:04:48.218154543 +0000 UTC m=+1587.653977070" watchObservedRunningTime="2025-12-04 10:04:48.22058186 +0000 UTC m=+1587.656404367" Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.425000 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tfwml"] Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.426714 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.443783 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tfwml"] Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.552662 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-utilities\") pod \"certified-operators-tfwml\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.552730 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfs94\" (UniqueName: \"kubernetes.io/projected/ee3663a0-f580-4d5d-8c38-adda443b7934-kube-api-access-gfs94\") pod \"certified-operators-tfwml\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.552765 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-catalog-content\") pod \"certified-operators-tfwml\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.653750 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-utilities\") pod \"certified-operators-tfwml\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.653825 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfs94\" (UniqueName: \"kubernetes.io/projected/ee3663a0-f580-4d5d-8c38-adda443b7934-kube-api-access-gfs94\") pod \"certified-operators-tfwml\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.653872 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-catalog-content\") pod \"certified-operators-tfwml\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.654289 4707 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-utilities\") pod \"certified-operators-tfwml\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.654391 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-catalog-content\") pod \"certified-operators-tfwml\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.672796 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfs94\" (UniqueName: \"kubernetes.io/projected/ee3663a0-f580-4d5d-8c38-adda443b7934-kube-api-access-gfs94\") pod \"certified-operators-tfwml\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:04:54 crc kubenswrapper[4707]: I1204 10:04:54.744051 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:04:55 crc kubenswrapper[4707]: I1204 10:04:55.009509 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tfwml"] Dec 04 10:04:55 crc kubenswrapper[4707]: W1204 10:04:55.021402 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee3663a0_f580_4d5d_8c38_adda443b7934.slice/crio-2935a7bd12be8ba7c18d80522e4cd2fb0baf226dfd3d275c97d497e219979c54 WatchSource:0}: Error finding container 2935a7bd12be8ba7c18d80522e4cd2fb0baf226dfd3d275c97d497e219979c54: Status 404 returned error can't find the container with id 2935a7bd12be8ba7c18d80522e4cd2fb0baf226dfd3d275c97d497e219979c54 Dec 04 10:04:55 crc kubenswrapper[4707]: I1204 10:04:55.246495 4707 generic.go:334] "Generic (PLEG): container finished" podID="ee3663a0-f580-4d5d-8c38-adda443b7934" containerID="fd073ada074680951513fb5000fd5ece42b04a2ff66e29164974c91dd308ddaf" exitCode=0 Dec 04 10:04:55 crc kubenswrapper[4707]: I1204 10:04:55.246653 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tfwml" event={"ID":"ee3663a0-f580-4d5d-8c38-adda443b7934","Type":"ContainerDied","Data":"fd073ada074680951513fb5000fd5ece42b04a2ff66e29164974c91dd308ddaf"} Dec 04 10:04:55 crc kubenswrapper[4707]: I1204 10:04:55.246822 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tfwml" event={"ID":"ee3663a0-f580-4d5d-8c38-adda443b7934","Type":"ContainerStarted","Data":"2935a7bd12be8ba7c18d80522e4cd2fb0baf226dfd3d275c97d497e219979c54"} Dec 04 10:04:55 crc kubenswrapper[4707]: I1204 10:04:55.452784 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:55 crc kubenswrapper[4707]: I1204 10:04:55.453609 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:55 crc kubenswrapper[4707]: I1204 10:04:55.494466 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:56 crc kubenswrapper[4707]: I1204 10:04:56.253893 4707 
generic.go:334] "Generic (PLEG): container finished" podID="ee3663a0-f580-4d5d-8c38-adda443b7934" containerID="e5b3a2fbdae4af4aa25ff42dce4598567f45348ad2c7e0ebc5c0cc28537179d5" exitCode=0 Dec 04 10:04:56 crc kubenswrapper[4707]: I1204 10:04:56.253930 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tfwml" event={"ID":"ee3663a0-f580-4d5d-8c38-adda443b7934","Type":"ContainerDied","Data":"e5b3a2fbdae4af4aa25ff42dce4598567f45348ad2c7e0ebc5c0cc28537179d5"} Dec 04 10:04:56 crc kubenswrapper[4707]: I1204 10:04:56.294909 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:57 crc kubenswrapper[4707]: I1204 10:04:57.262374 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tfwml" event={"ID":"ee3663a0-f580-4d5d-8c38-adda443b7934","Type":"ContainerStarted","Data":"6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84"} Dec 04 10:04:57 crc kubenswrapper[4707]: I1204 10:04:57.278421 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tfwml" podStartSLOduration=1.689232358 podStartE2EDuration="3.278401997s" podCreationTimestamp="2025-12-04 10:04:54 +0000 UTC" firstStartedPulling="2025-12-04 10:04:55.248146954 +0000 UTC m=+1594.683969461" lastFinishedPulling="2025-12-04 10:04:56.837316593 +0000 UTC m=+1596.273139100" observedRunningTime="2025-12-04 10:04:57.276695763 +0000 UTC m=+1596.712518280" watchObservedRunningTime="2025-12-04 10:04:57.278401997 +0000 UTC m=+1596.714224504" Dec 04 10:04:57 crc kubenswrapper[4707]: I1204 10:04:57.804431 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcsvt"] Dec 04 10:04:58 crc kubenswrapper[4707]: I1204 10:04:58.655113 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-dwmkj_7528abe2-fb27-4c14-88c6-98fcbb716395/control-plane-machine-set-operator/0.log" Dec 04 10:04:58 crc kubenswrapper[4707]: I1204 10:04:58.833127 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-22vzf_5a08fa03-e041-425a-b5e8-05300cdac87b/machine-api-operator/0.log" Dec 04 10:04:58 crc kubenswrapper[4707]: I1204 10:04:58.854644 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-22vzf_5a08fa03-e041-425a-b5e8-05300cdac87b/kube-rbac-proxy/0.log" Dec 04 10:04:59 crc kubenswrapper[4707]: I1204 10:04:59.276134 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gcsvt" podUID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" containerName="registry-server" containerID="cri-o://360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b" gracePeriod=2 Dec 04 10:04:59 crc kubenswrapper[4707]: I1204 10:04:59.657998 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:04:59 crc kubenswrapper[4707]: I1204 10:04:59.730910 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-utilities\") pod \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " Dec 04 10:04:59 crc kubenswrapper[4707]: I1204 10:04:59.731721 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-utilities" (OuterVolumeSpecName: "utilities") pod "af00f037-e6f1-4b2d-875f-9d5a0aad93a8" (UID: "af00f037-e6f1-4b2d-875f-9d5a0aad93a8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:04:59 crc kubenswrapper[4707]: I1204 10:04:59.731999 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 10:04:59 crc kubenswrapper[4707]: I1204 10:04:59.832556 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7knx\" (UniqueName: \"kubernetes.io/projected/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-kube-api-access-l7knx\") pod \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " Dec 04 10:04:59 crc kubenswrapper[4707]: I1204 10:04:59.832655 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-catalog-content\") pod \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\" (UID: \"af00f037-e6f1-4b2d-875f-9d5a0aad93a8\") " Dec 04 10:04:59 crc kubenswrapper[4707]: I1204 10:04:59.841728 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-kube-api-access-l7knx" (OuterVolumeSpecName: "kube-api-access-l7knx") pod "af00f037-e6f1-4b2d-875f-9d5a0aad93a8" (UID: "af00f037-e6f1-4b2d-875f-9d5a0aad93a8"). InnerVolumeSpecName "kube-api-access-l7knx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:04:59 crc kubenswrapper[4707]: I1204 10:04:59.856715 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af00f037-e6f1-4b2d-875f-9d5a0aad93a8" (UID: "af00f037-e6f1-4b2d-875f-9d5a0aad93a8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:04:59 crc kubenswrapper[4707]: I1204 10:04:59.933712 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7knx\" (UniqueName: \"kubernetes.io/projected/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-kube-api-access-l7knx\") on node \"crc\" DevicePath \"\"" Dec 04 10:04:59 crc kubenswrapper[4707]: I1204 10:04:59.933759 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af00f037-e6f1-4b2d-875f-9d5a0aad93a8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.284587 4707 generic.go:334] "Generic (PLEG): container finished" podID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" containerID="360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b" exitCode=0 Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.284650 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcsvt" event={"ID":"af00f037-e6f1-4b2d-875f-9d5a0aad93a8","Type":"ContainerDied","Data":"360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b"} Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.284667 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gcsvt" Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.284698 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcsvt" event={"ID":"af00f037-e6f1-4b2d-875f-9d5a0aad93a8","Type":"ContainerDied","Data":"a9d3a99b11d89de74f80e99930bbfcd2282fc9e00d2f664ebec990fd95c096d6"} Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.284752 4707 scope.go:117] "RemoveContainer" containerID="360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b" Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.305380 4707 scope.go:117] "RemoveContainer" containerID="a1bb81a3adfc47f80d68703dacd1a2b36e843d937139c0fb455866f51bee0719" Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.326484 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcsvt"] Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.334402 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcsvt"] Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.347031 4707 scope.go:117] "RemoveContainer" containerID="5495d328fd348ac506912d95badc58f8fb9506f7337310bb2eaf5af21974eae7" Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.365919 4707 scope.go:117] "RemoveContainer" containerID="360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b" Dec 04 10:05:00 crc kubenswrapper[4707]: E1204 10:05:00.366642 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b\": container with ID starting with 360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b not found: ID does not exist" containerID="360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b" Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.366713 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b"} err="failed to get container status 
\"360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b\": rpc error: code = NotFound desc = could not find container \"360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b\": container with ID starting with 360e8ce890590cae5eea231e6f1dd63441e192099316a3f559f8789f228fa41b not found: ID does not exist" Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.366749 4707 scope.go:117] "RemoveContainer" containerID="a1bb81a3adfc47f80d68703dacd1a2b36e843d937139c0fb455866f51bee0719" Dec 04 10:05:00 crc kubenswrapper[4707]: E1204 10:05:00.367172 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1bb81a3adfc47f80d68703dacd1a2b36e843d937139c0fb455866f51bee0719\": container with ID starting with a1bb81a3adfc47f80d68703dacd1a2b36e843d937139c0fb455866f51bee0719 not found: ID does not exist" containerID="a1bb81a3adfc47f80d68703dacd1a2b36e843d937139c0fb455866f51bee0719" Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.367225 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1bb81a3adfc47f80d68703dacd1a2b36e843d937139c0fb455866f51bee0719"} err="failed to get container status \"a1bb81a3adfc47f80d68703dacd1a2b36e843d937139c0fb455866f51bee0719\": rpc error: code = NotFound desc = could not find container \"a1bb81a3adfc47f80d68703dacd1a2b36e843d937139c0fb455866f51bee0719\": container with ID starting with a1bb81a3adfc47f80d68703dacd1a2b36e843d937139c0fb455866f51bee0719 not found: ID does not exist" Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.367256 4707 scope.go:117] "RemoveContainer" containerID="5495d328fd348ac506912d95badc58f8fb9506f7337310bb2eaf5af21974eae7" Dec 04 10:05:00 crc kubenswrapper[4707]: E1204 10:05:00.367633 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5495d328fd348ac506912d95badc58f8fb9506f7337310bb2eaf5af21974eae7\": container with ID starting with 5495d328fd348ac506912d95badc58f8fb9506f7337310bb2eaf5af21974eae7 not found: ID does not exist" containerID="5495d328fd348ac506912d95badc58f8fb9506f7337310bb2eaf5af21974eae7" Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.367667 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5495d328fd348ac506912d95badc58f8fb9506f7337310bb2eaf5af21974eae7"} err="failed to get container status \"5495d328fd348ac506912d95badc58f8fb9506f7337310bb2eaf5af21974eae7\": rpc error: code = NotFound desc = could not find container \"5495d328fd348ac506912d95badc58f8fb9506f7337310bb2eaf5af21974eae7\": container with ID starting with 5495d328fd348ac506912d95badc58f8fb9506f7337310bb2eaf5af21974eae7 not found: ID does not exist" Dec 04 10:05:00 crc kubenswrapper[4707]: I1204 10:05:00.851830 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" path="/var/lib/kubelet/pods/af00f037-e6f1-4b2d-875f-9d5a0aad93a8/volumes" Dec 04 10:05:04 crc kubenswrapper[4707]: I1204 10:05:04.744557 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:05:04 crc kubenswrapper[4707]: I1204 10:05:04.746037 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:05:04 crc kubenswrapper[4707]: I1204 10:05:04.790576 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:05:05 crc kubenswrapper[4707]: I1204 10:05:05.354177 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:05:05 crc kubenswrapper[4707]: I1204 10:05:05.407210 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tfwml"] Dec 04 10:05:07 crc kubenswrapper[4707]: I1204 10:05:07.325690 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tfwml" podUID="ee3663a0-f580-4d5d-8c38-adda443b7934" containerName="registry-server" containerID="cri-o://6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84" gracePeriod=2 Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.279058 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.303453 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-utilities\") pod \"ee3663a0-f580-4d5d-8c38-adda443b7934\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.303594 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfs94\" (UniqueName: \"kubernetes.io/projected/ee3663a0-f580-4d5d-8c38-adda443b7934-kube-api-access-gfs94\") pod \"ee3663a0-f580-4d5d-8c38-adda443b7934\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.304697 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-catalog-content\") pod \"ee3663a0-f580-4d5d-8c38-adda443b7934\" (UID: \"ee3663a0-f580-4d5d-8c38-adda443b7934\") " Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.304715 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-utilities" (OuterVolumeSpecName: "utilities") pod "ee3663a0-f580-4d5d-8c38-adda443b7934" (UID: "ee3663a0-f580-4d5d-8c38-adda443b7934"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.304962 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.309481 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee3663a0-f580-4d5d-8c38-adda443b7934-kube-api-access-gfs94" (OuterVolumeSpecName: "kube-api-access-gfs94") pod "ee3663a0-f580-4d5d-8c38-adda443b7934" (UID: "ee3663a0-f580-4d5d-8c38-adda443b7934"). InnerVolumeSpecName "kube-api-access-gfs94". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.333542 4707 generic.go:334] "Generic (PLEG): container finished" podID="ee3663a0-f580-4d5d-8c38-adda443b7934" containerID="6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84" exitCode=0 Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.333582 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tfwml" event={"ID":"ee3663a0-f580-4d5d-8c38-adda443b7934","Type":"ContainerDied","Data":"6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84"} Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.333597 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tfwml" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.333626 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tfwml" event={"ID":"ee3663a0-f580-4d5d-8c38-adda443b7934","Type":"ContainerDied","Data":"2935a7bd12be8ba7c18d80522e4cd2fb0baf226dfd3d275c97d497e219979c54"} Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.333647 4707 scope.go:117] "RemoveContainer" containerID="6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.349240 4707 scope.go:117] "RemoveContainer" containerID="e5b3a2fbdae4af4aa25ff42dce4598567f45348ad2c7e0ebc5c0cc28537179d5" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.357554 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee3663a0-f580-4d5d-8c38-adda443b7934" (UID: "ee3663a0-f580-4d5d-8c38-adda443b7934"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.367755 4707 scope.go:117] "RemoveContainer" containerID="fd073ada074680951513fb5000fd5ece42b04a2ff66e29164974c91dd308ddaf" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.392944 4707 scope.go:117] "RemoveContainer" containerID="6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84" Dec 04 10:05:08 crc kubenswrapper[4707]: E1204 10:05:08.393422 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84\": container with ID starting with 6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84 not found: ID does not exist" containerID="6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.393451 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84"} err="failed to get container status \"6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84\": rpc error: code = NotFound desc = could not find container \"6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84\": container with ID starting with 6b9403ca0128127ab9353839ff9af7d8d46f383dffca9d491e5a810cdae77e84 not found: ID does not exist" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.393473 4707 scope.go:117] "RemoveContainer" containerID="e5b3a2fbdae4af4aa25ff42dce4598567f45348ad2c7e0ebc5c0cc28537179d5" Dec 04 10:05:08 crc kubenswrapper[4707]: E1204 10:05:08.393798 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5b3a2fbdae4af4aa25ff42dce4598567f45348ad2c7e0ebc5c0cc28537179d5\": container with ID starting with e5b3a2fbdae4af4aa25ff42dce4598567f45348ad2c7e0ebc5c0cc28537179d5 not found: ID does not exist" containerID="e5b3a2fbdae4af4aa25ff42dce4598567f45348ad2c7e0ebc5c0cc28537179d5" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.393818 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5b3a2fbdae4af4aa25ff42dce4598567f45348ad2c7e0ebc5c0cc28537179d5"} err="failed to get container status \"e5b3a2fbdae4af4aa25ff42dce4598567f45348ad2c7e0ebc5c0cc28537179d5\": rpc error: code = NotFound desc = could not find container \"e5b3a2fbdae4af4aa25ff42dce4598567f45348ad2c7e0ebc5c0cc28537179d5\": container with ID starting with e5b3a2fbdae4af4aa25ff42dce4598567f45348ad2c7e0ebc5c0cc28537179d5 not found: ID does not exist" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.393832 4707 scope.go:117] "RemoveContainer" containerID="fd073ada074680951513fb5000fd5ece42b04a2ff66e29164974c91dd308ddaf" Dec 04 10:05:08 crc kubenswrapper[4707]: E1204 10:05:08.394187 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd073ada074680951513fb5000fd5ece42b04a2ff66e29164974c91dd308ddaf\": container with ID starting with fd073ada074680951513fb5000fd5ece42b04a2ff66e29164974c91dd308ddaf not found: ID does not exist" containerID="fd073ada074680951513fb5000fd5ece42b04a2ff66e29164974c91dd308ddaf" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.394206 4707 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"fd073ada074680951513fb5000fd5ece42b04a2ff66e29164974c91dd308ddaf"} err="failed to get container status \"fd073ada074680951513fb5000fd5ece42b04a2ff66e29164974c91dd308ddaf\": rpc error: code = NotFound desc = could not find container \"fd073ada074680951513fb5000fd5ece42b04a2ff66e29164974c91dd308ddaf\": container with ID starting with fd073ada074680951513fb5000fd5ece42b04a2ff66e29164974c91dd308ddaf not found: ID does not exist" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.405478 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee3663a0-f580-4d5d-8c38-adda443b7934-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.405528 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfs94\" (UniqueName: \"kubernetes.io/projected/ee3663a0-f580-4d5d-8c38-adda443b7934-kube-api-access-gfs94\") on node \"crc\" DevicePath \"\"" Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.661037 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tfwml"] Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.665578 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tfwml"] Dec 04 10:05:08 crc kubenswrapper[4707]: I1204 10:05:08.852872 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee3663a0-f580-4d5d-8c38-adda443b7934" path="/var/lib/kubelet/pods/ee3663a0-f580-4d5d-8c38-adda443b7934/volumes" Dec 04 10:05:13 crc kubenswrapper[4707]: I1204 10:05:13.540890 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-kv6jh_058a219f-7ca4-486e-87e2-7406ad069250/kube-rbac-proxy/0.log" Dec 04 10:05:13 crc kubenswrapper[4707]: I1204 10:05:13.561296 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-kv6jh_058a219f-7ca4-486e-87e2-7406ad069250/controller/0.log" Dec 04 10:05:13 crc kubenswrapper[4707]: I1204 10:05:13.713490 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-frr-files/0.log" Dec 04 10:05:13 crc kubenswrapper[4707]: I1204 10:05:13.886312 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-frr-files/0.log" Dec 04 10:05:13 crc kubenswrapper[4707]: I1204 10:05:13.888988 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-reloader/0.log" Dec 04 10:05:13 crc kubenswrapper[4707]: I1204 10:05:13.898849 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-metrics/0.log" Dec 04 10:05:13 crc kubenswrapper[4707]: I1204 10:05:13.942879 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-reloader/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.103112 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-reloader/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.125626 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-frr-files/0.log" Dec 
04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.169718 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-metrics/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.169766 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-metrics/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.345420 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-frr-files/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.368953 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-metrics/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.372410 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-reloader/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.383840 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/controller/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.532906 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/frr-metrics/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.556812 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/kube-rbac-proxy-frr/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.575779 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/kube-rbac-proxy/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.871824 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/frr/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.924033 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/reloader/0.log" Dec 04 10:05:14 crc kubenswrapper[4707]: I1204 10:05:14.951686 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-vxncn_31fd648a-f639-45c0-a30c-77afc9cafedc/frr-k8s-webhook-server/0.log" Dec 04 10:05:15 crc kubenswrapper[4707]: I1204 10:05:15.130459 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5657775d6b-zhwls_41bddb2a-8c9f-42a0-a450-06a9e755c211/manager/0.log" Dec 04 10:05:15 crc kubenswrapper[4707]: I1204 10:05:15.145712 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-fcf969487-8d9dx_6a9e4f53-c751-4994-8e44-6bcc07b40dc8/webhook-server/0.log" Dec 04 10:05:15 crc kubenswrapper[4707]: I1204 10:05:15.297375 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-599tz_52f06bc6-db65-4283-961c-3bee70be7363/kube-rbac-proxy/0.log" Dec 04 10:05:15 crc kubenswrapper[4707]: I1204 10:05:15.359703 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-599tz_52f06bc6-db65-4283-961c-3bee70be7363/speaker/0.log" Dec 04 10:05:22 crc kubenswrapper[4707]: I1204 
10:05:22.667410 4707 scope.go:117] "RemoveContainer" containerID="0c5be9822eadcf7e6cccc699a343a98de4285bff130efaca8d7271edac999ac2" Dec 04 10:05:22 crc kubenswrapper[4707]: I1204 10:05:22.688575 4707 scope.go:117] "RemoveContainer" containerID="1d66559e5982470757c7a470726c3b3b76628e9d8b86363d0da3dd475beb469a" Dec 04 10:05:22 crc kubenswrapper[4707]: I1204 10:05:22.715661 4707 scope.go:117] "RemoveContainer" containerID="a005f97f50b5ac3055eae49dcb7c1d520b9756155f43a9fd53876fc06be20a58" Dec 04 10:05:22 crc kubenswrapper[4707]: I1204 10:05:22.743702 4707 scope.go:117] "RemoveContainer" containerID="3bedb78b68b1a236ce1c7df4a602bc6bb68ffed2c74c8c1d67b523e01d1bffe6" Dec 04 10:05:22 crc kubenswrapper[4707]: I1204 10:05:22.763170 4707 scope.go:117] "RemoveContainer" containerID="71279b45abda23aed836834dff7a5b6c3a8b344427e097bd9e981c90b505a3d2" Dec 04 10:05:22 crc kubenswrapper[4707]: I1204 10:05:22.802465 4707 scope.go:117] "RemoveContainer" containerID="1351f5bd558010df344c3764b52c56571a496675b2e831eda286471ac011eb44" Dec 04 10:05:30 crc kubenswrapper[4707]: I1204 10:05:30.817174 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 10:05:30 crc kubenswrapper[4707]: I1204 10:05:30.817846 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.152098 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/util/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.320179 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/util/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.325932 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/pull/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.352904 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/pull/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.488267 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/util/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.488570 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/extract/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.496449 4707 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/pull/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.637236 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-utilities/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.811629 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-content/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.823788 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-utilities/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.839284 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-content/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.956426 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-content/0.log" Dec 04 10:05:38 crc kubenswrapper[4707]: I1204 10:05:38.977320 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-utilities/0.log" Dec 04 10:05:39 crc kubenswrapper[4707]: I1204 10:05:39.183845 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-utilities/0.log" Dec 04 10:05:39 crc kubenswrapper[4707]: I1204 10:05:39.276910 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/registry-server/0.log" Dec 04 10:05:39 crc kubenswrapper[4707]: I1204 10:05:39.282576 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-content/0.log" Dec 04 10:05:39 crc kubenswrapper[4707]: I1204 10:05:39.299417 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-utilities/0.log" Dec 04 10:05:39 crc kubenswrapper[4707]: I1204 10:05:39.346796 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-content/0.log" Dec 04 10:05:39 crc kubenswrapper[4707]: I1204 10:05:39.527958 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-utilities/0.log" Dec 04 10:05:39 crc kubenswrapper[4707]: I1204 10:05:39.551397 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-content/0.log" Dec 04 10:05:39 crc kubenswrapper[4707]: I1204 10:05:39.734236 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8qmqt_860836b6-d7c9-4c56-9193-c4bbaeca659b/marketplace-operator/0.log" Dec 04 10:05:39 crc kubenswrapper[4707]: I1204 10:05:39.848775 4707 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-utilities/0.log" Dec 04 10:05:39 crc kubenswrapper[4707]: I1204 10:05:39.921529 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/registry-server/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.000754 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-content/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.002012 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-utilities/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.027466 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-content/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.218548 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-content/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.236110 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-utilities/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.296963 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/registry-server/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.415378 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-utilities/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.535293 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-utilities/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.563320 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-content/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.568569 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-content/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.716837 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-content/0.log" Dec 04 10:05:40 crc kubenswrapper[4707]: I1204 10:05:40.717092 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-utilities/0.log" Dec 04 10:05:41 crc kubenswrapper[4707]: I1204 10:05:41.049216 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/registry-server/0.log" Dec 04 10:06:00 crc kubenswrapper[4707]: I1204 10:06:00.817508 4707 patch_prober.go:28] interesting 
pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 10:06:00 crc kubenswrapper[4707]: I1204 10:06:00.818059 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 10:06:30 crc kubenswrapper[4707]: I1204 10:06:30.817107 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 10:06:30 crc kubenswrapper[4707]: I1204 10:06:30.817682 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 10:06:30 crc kubenswrapper[4707]: I1204 10:06:30.817738 4707 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 10:06:30 crc kubenswrapper[4707]: I1204 10:06:30.820962 4707 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8"} pod="openshift-machine-config-operator/machine-config-daemon-c244z" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 10:06:30 crc kubenswrapper[4707]: I1204 10:06:30.821095 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" containerID="cri-o://c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" gracePeriod=600 Dec 04 10:06:31 crc kubenswrapper[4707]: E1204 10:06:31.462600 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:06:31 crc kubenswrapper[4707]: I1204 10:06:31.808105 4707 generic.go:334] "Generic (PLEG): container finished" podID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" exitCode=0 Dec 04 10:06:31 crc kubenswrapper[4707]: I1204 10:06:31.808144 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerDied","Data":"c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8"} Dec 04 10:06:31 crc kubenswrapper[4707]: 
I1204 10:06:31.808179 4707 scope.go:117] "RemoveContainer" containerID="a40438c3e1376df9722e48d67ff0c6c89cb3a6cefb6a18fc0adad90335e07b60" Dec 04 10:06:31 crc kubenswrapper[4707]: I1204 10:06:31.808716 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:06:31 crc kubenswrapper[4707]: E1204 10:06:31.808952 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:06:43 crc kubenswrapper[4707]: I1204 10:06:43.844411 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:06:43 crc kubenswrapper[4707]: E1204 10:06:43.845316 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:06:51 crc kubenswrapper[4707]: I1204 10:06:51.948877 4707 generic.go:334] "Generic (PLEG): container finished" podID="7fd40b26-2ba1-4f8f-93ba-050266ad3fde" containerID="6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d" exitCode=0 Dec 04 10:06:51 crc kubenswrapper[4707]: I1204 10:06:51.948969 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdrpr/must-gather-l8l4v" event={"ID":"7fd40b26-2ba1-4f8f-93ba-050266ad3fde","Type":"ContainerDied","Data":"6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d"} Dec 04 10:06:51 crc kubenswrapper[4707]: I1204 10:06:51.950697 4707 scope.go:117] "RemoveContainer" containerID="6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d" Dec 04 10:06:52 crc kubenswrapper[4707]: I1204 10:06:52.178323 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jdrpr_must-gather-l8l4v_7fd40b26-2ba1-4f8f-93ba-050266ad3fde/gather/0.log" Dec 04 10:06:58 crc kubenswrapper[4707]: I1204 10:06:58.844444 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:06:58 crc kubenswrapper[4707]: E1204 10:06:58.845164 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:06:58 crc kubenswrapper[4707]: I1204 10:06:58.910768 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jdrpr/must-gather-l8l4v"] Dec 04 10:06:58 crc kubenswrapper[4707]: I1204 10:06:58.911058 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-jdrpr/must-gather-l8l4v" podUID="7fd40b26-2ba1-4f8f-93ba-050266ad3fde" 
containerName="copy" containerID="cri-o://fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385" gracePeriod=2 Dec 04 10:06:58 crc kubenswrapper[4707]: I1204 10:06:58.917580 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jdrpr/must-gather-l8l4v"] Dec 04 10:06:59 crc kubenswrapper[4707]: I1204 10:06:59.415075 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jdrpr_must-gather-l8l4v_7fd40b26-2ba1-4f8f-93ba-050266ad3fde/copy/0.log" Dec 04 10:06:59 crc kubenswrapper[4707]: I1204 10:06:59.415488 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdrpr/must-gather-l8l4v" Dec 04 10:06:59 crc kubenswrapper[4707]: I1204 10:06:59.561978 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lp85\" (UniqueName: \"kubernetes.io/projected/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-kube-api-access-9lp85\") pod \"7fd40b26-2ba1-4f8f-93ba-050266ad3fde\" (UID: \"7fd40b26-2ba1-4f8f-93ba-050266ad3fde\") " Dec 04 10:06:59 crc kubenswrapper[4707]: I1204 10:06:59.562350 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-must-gather-output\") pod \"7fd40b26-2ba1-4f8f-93ba-050266ad3fde\" (UID: \"7fd40b26-2ba1-4f8f-93ba-050266ad3fde\") " Dec 04 10:06:59 crc kubenswrapper[4707]: I1204 10:06:59.568154 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-kube-api-access-9lp85" (OuterVolumeSpecName: "kube-api-access-9lp85") pod "7fd40b26-2ba1-4f8f-93ba-050266ad3fde" (UID: "7fd40b26-2ba1-4f8f-93ba-050266ad3fde"). InnerVolumeSpecName "kube-api-access-9lp85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:06:59 crc kubenswrapper[4707]: I1204 10:06:59.622513 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "7fd40b26-2ba1-4f8f-93ba-050266ad3fde" (UID: "7fd40b26-2ba1-4f8f-93ba-050266ad3fde"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:06:59 crc kubenswrapper[4707]: I1204 10:06:59.662878 4707 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 04 10:06:59 crc kubenswrapper[4707]: I1204 10:06:59.662919 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lp85\" (UniqueName: \"kubernetes.io/projected/7fd40b26-2ba1-4f8f-93ba-050266ad3fde-kube-api-access-9lp85\") on node \"crc\" DevicePath \"\"" Dec 04 10:07:00 crc kubenswrapper[4707]: I1204 10:07:00.005812 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jdrpr_must-gather-l8l4v_7fd40b26-2ba1-4f8f-93ba-050266ad3fde/copy/0.log" Dec 04 10:07:00 crc kubenswrapper[4707]: I1204 10:07:00.006939 4707 generic.go:334] "Generic (PLEG): container finished" podID="7fd40b26-2ba1-4f8f-93ba-050266ad3fde" containerID="fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385" exitCode=143 Dec 04 10:07:00 crc kubenswrapper[4707]: I1204 10:07:00.007007 4707 scope.go:117] "RemoveContainer" containerID="fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385" Dec 04 10:07:00 crc kubenswrapper[4707]: I1204 10:07:00.007033 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdrpr/must-gather-l8l4v" Dec 04 10:07:00 crc kubenswrapper[4707]: I1204 10:07:00.024736 4707 scope.go:117] "RemoveContainer" containerID="6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d" Dec 04 10:07:00 crc kubenswrapper[4707]: I1204 10:07:00.055574 4707 scope.go:117] "RemoveContainer" containerID="fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385" Dec 04 10:07:00 crc kubenswrapper[4707]: E1204 10:07:00.056352 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385\": container with ID starting with fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385 not found: ID does not exist" containerID="fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385" Dec 04 10:07:00 crc kubenswrapper[4707]: I1204 10:07:00.056385 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385"} err="failed to get container status \"fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385\": rpc error: code = NotFound desc = could not find container \"fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385\": container with ID starting with fd9a86cb6f5d551f51cb91bb2e0676c0ef387f6f1889f206838e36e808381385 not found: ID does not exist" Dec 04 10:07:00 crc kubenswrapper[4707]: I1204 10:07:00.056423 4707 scope.go:117] "RemoveContainer" containerID="6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d" Dec 04 10:07:00 crc kubenswrapper[4707]: E1204 10:07:00.056930 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d\": container with ID starting with 6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d not found: ID does not exist" containerID="6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d" Dec 04 10:07:00 crc 
kubenswrapper[4707]: I1204 10:07:00.056956 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d"} err="failed to get container status \"6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d\": rpc error: code = NotFound desc = could not find container \"6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d\": container with ID starting with 6816820c779ddf1219a2c6fe304ba1b93c81fa4b67ac868700de924a45f8667d not found: ID does not exist" Dec 04 10:07:00 crc kubenswrapper[4707]: I1204 10:07:00.851520 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fd40b26-2ba1-4f8f-93ba-050266ad3fde" path="/var/lib/kubelet/pods/7fd40b26-2ba1-4f8f-93ba-050266ad3fde/volumes" Dec 04 10:07:13 crc kubenswrapper[4707]: I1204 10:07:13.845162 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:07:13 crc kubenswrapper[4707]: E1204 10:07:13.845977 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:07:22 crc kubenswrapper[4707]: I1204 10:07:22.896515 4707 scope.go:117] "RemoveContainer" containerID="dce391e07dc0c067ad013719a113982a7b65aefbb658700830a0a7e6d53477d4" Dec 04 10:07:22 crc kubenswrapper[4707]: I1204 10:07:22.914444 4707 scope.go:117] "RemoveContainer" containerID="27e81c0890a27226fd777a1a811bd0713e1ec2d4121f428f752db37af6237c39" Dec 04 10:07:22 crc kubenswrapper[4707]: I1204 10:07:22.935696 4707 scope.go:117] "RemoveContainer" containerID="67f876de9692bd20cf6cffd99fde2450a9fbb10f390ea55ee528a2b93c1aeb10" Dec 04 10:07:22 crc kubenswrapper[4707]: I1204 10:07:22.967319 4707 scope.go:117] "RemoveContainer" containerID="da9f950d03719060d2813b8b17feb8723c54f1d8fa7271b26ccf944e1e778962" Dec 04 10:07:22 crc kubenswrapper[4707]: I1204 10:07:22.981825 4707 scope.go:117] "RemoveContainer" containerID="f54c3cee1446aca41c1854835c53c1fd4596dcc8cc3bb39524ad221554491d40" Dec 04 10:07:24 crc kubenswrapper[4707]: I1204 10:07:24.845319 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:07:24 crc kubenswrapper[4707]: E1204 10:07:24.845550 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:07:35 crc kubenswrapper[4707]: I1204 10:07:35.844986 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:07:35 crc kubenswrapper[4707]: E1204 10:07:35.848139 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:07:49 crc kubenswrapper[4707]: I1204 10:07:49.844884 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:07:49 crc kubenswrapper[4707]: E1204 10:07:49.845655 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:08:01 crc kubenswrapper[4707]: I1204 10:08:01.845017 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:08:01 crc kubenswrapper[4707]: E1204 10:08:01.845635 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.225056 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-clscm"] Dec 04 10:08:13 crc kubenswrapper[4707]: E1204 10:08:13.225846 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" containerName="extract-utilities" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.225858 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" containerName="extract-utilities" Dec 04 10:08:13 crc kubenswrapper[4707]: E1204 10:08:13.225868 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee3663a0-f580-4d5d-8c38-adda443b7934" containerName="extract-content" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.225875 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee3663a0-f580-4d5d-8c38-adda443b7934" containerName="extract-content" Dec 04 10:08:13 crc kubenswrapper[4707]: E1204 10:08:13.225883 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee3663a0-f580-4d5d-8c38-adda443b7934" containerName="extract-utilities" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.225889 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee3663a0-f580-4d5d-8c38-adda443b7934" containerName="extract-utilities" Dec 04 10:08:13 crc kubenswrapper[4707]: E1204 10:08:13.225904 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fd40b26-2ba1-4f8f-93ba-050266ad3fde" containerName="gather" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.225910 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fd40b26-2ba1-4f8f-93ba-050266ad3fde" containerName="gather" Dec 04 10:08:13 crc kubenswrapper[4707]: E1204 10:08:13.225921 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fd40b26-2ba1-4f8f-93ba-050266ad3fde" containerName="copy" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 
10:08:13.225927 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fd40b26-2ba1-4f8f-93ba-050266ad3fde" containerName="copy" Dec 04 10:08:13 crc kubenswrapper[4707]: E1204 10:08:13.225936 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" containerName="registry-server" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.225942 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" containerName="registry-server" Dec 04 10:08:13 crc kubenswrapper[4707]: E1204 10:08:13.225950 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee3663a0-f580-4d5d-8c38-adda443b7934" containerName="registry-server" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.225955 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee3663a0-f580-4d5d-8c38-adda443b7934" containerName="registry-server" Dec 04 10:08:13 crc kubenswrapper[4707]: E1204 10:08:13.225967 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" containerName="extract-content" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.225973 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" containerName="extract-content" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.226070 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fd40b26-2ba1-4f8f-93ba-050266ad3fde" containerName="copy" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.226088 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fd40b26-2ba1-4f8f-93ba-050266ad3fde" containerName="gather" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.226099 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="af00f037-e6f1-4b2d-875f-9d5a0aad93a8" containerName="registry-server" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.226113 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee3663a0-f580-4d5d-8c38-adda443b7934" containerName="registry-server" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.227038 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.229524 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-clscm"] Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.264695 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j55wm\" (UniqueName: \"kubernetes.io/projected/ed35aba4-eab4-46de-8b85-03f41d79c22f-kube-api-access-j55wm\") pod \"redhat-operators-clscm\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.265246 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-catalog-content\") pod \"redhat-operators-clscm\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.265325 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-utilities\") pod \"redhat-operators-clscm\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.367123 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j55wm\" (UniqueName: \"kubernetes.io/projected/ed35aba4-eab4-46de-8b85-03f41d79c22f-kube-api-access-j55wm\") pod \"redhat-operators-clscm\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.367195 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-catalog-content\") pod \"redhat-operators-clscm\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.367230 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-utilities\") pod \"redhat-operators-clscm\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.367789 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-utilities\") pod \"redhat-operators-clscm\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.368527 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-catalog-content\") pod \"redhat-operators-clscm\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.393743 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-j55wm\" (UniqueName: \"kubernetes.io/projected/ed35aba4-eab4-46de-8b85-03f41d79c22f-kube-api-access-j55wm\") pod \"redhat-operators-clscm\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.421473 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fvcwx"] Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.422769 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.428091 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fvcwx"] Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.468419 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-utilities\") pod \"community-operators-fvcwx\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.468495 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sc669\" (UniqueName: \"kubernetes.io/projected/c5f08d69-168a-4fa0-86b3-ffd17de510b8-kube-api-access-sc669\") pod \"community-operators-fvcwx\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.468633 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-catalog-content\") pod \"community-operators-fvcwx\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.559100 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.569179 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sc669\" (UniqueName: \"kubernetes.io/projected/c5f08d69-168a-4fa0-86b3-ffd17de510b8-kube-api-access-sc669\") pod \"community-operators-fvcwx\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.569236 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-catalog-content\") pod \"community-operators-fvcwx\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.569291 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-utilities\") pod \"community-operators-fvcwx\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.569702 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-utilities\") pod \"community-operators-fvcwx\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.570391 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-catalog-content\") pod \"community-operators-fvcwx\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.592375 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sc669\" (UniqueName: \"kubernetes.io/projected/c5f08d69-168a-4fa0-86b3-ffd17de510b8-kube-api-access-sc669\") pod \"community-operators-fvcwx\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.751023 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:13 crc kubenswrapper[4707]: I1204 10:08:13.794073 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-clscm"] Dec 04 10:08:14 crc kubenswrapper[4707]: I1204 10:08:14.057151 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fvcwx"] Dec 04 10:08:14 crc kubenswrapper[4707]: W1204 10:08:14.085421 4707 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5f08d69_168a_4fa0_86b3_ffd17de510b8.slice/crio-8a594bada8a8cd7f58e05612fa4d1dd9cf29b39a426c0aa725ae559d4e4bc80e WatchSource:0}: Error finding container 8a594bada8a8cd7f58e05612fa4d1dd9cf29b39a426c0aa725ae559d4e4bc80e: Status 404 returned error can't find the container with id 8a594bada8a8cd7f58e05612fa4d1dd9cf29b39a426c0aa725ae559d4e4bc80e Dec 04 10:08:14 crc kubenswrapper[4707]: I1204 10:08:14.426904 4707 generic.go:334] "Generic (PLEG): container finished" podID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" containerID="abd0daa1ef87de4d274f2b69309b41db6f50dc336bc5554246e1e083a588fdc4" exitCode=0 Dec 04 10:08:14 crc kubenswrapper[4707]: I1204 10:08:14.427027 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvcwx" event={"ID":"c5f08d69-168a-4fa0-86b3-ffd17de510b8","Type":"ContainerDied","Data":"abd0daa1ef87de4d274f2b69309b41db6f50dc336bc5554246e1e083a588fdc4"} Dec 04 10:08:14 crc kubenswrapper[4707]: I1204 10:08:14.427282 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvcwx" event={"ID":"c5f08d69-168a-4fa0-86b3-ffd17de510b8","Type":"ContainerStarted","Data":"8a594bada8a8cd7f58e05612fa4d1dd9cf29b39a426c0aa725ae559d4e4bc80e"} Dec 04 10:08:14 crc kubenswrapper[4707]: I1204 10:08:14.428730 4707 generic.go:334] "Generic (PLEG): container finished" podID="ed35aba4-eab4-46de-8b85-03f41d79c22f" containerID="88cef275e82bbbc8c1415568f2638605e70be0f38ba3cbde0987d4be447d9866" exitCode=0 Dec 04 10:08:14 crc kubenswrapper[4707]: I1204 10:08:14.428755 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clscm" event={"ID":"ed35aba4-eab4-46de-8b85-03f41d79c22f","Type":"ContainerDied","Data":"88cef275e82bbbc8c1415568f2638605e70be0f38ba3cbde0987d4be447d9866"} Dec 04 10:08:14 crc kubenswrapper[4707]: I1204 10:08:14.428786 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clscm" event={"ID":"ed35aba4-eab4-46de-8b85-03f41d79c22f","Type":"ContainerStarted","Data":"64458f543dbc948786738bfbcd0367b177a8e3f8af5535a0d463c18455bd00e6"} Dec 04 10:08:15 crc kubenswrapper[4707]: I1204 10:08:15.438990 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvcwx" event={"ID":"c5f08d69-168a-4fa0-86b3-ffd17de510b8","Type":"ContainerStarted","Data":"ccace9e60ed2dbc032217b7952e8fc2923fbcba2dcf79cece724499a36dc950f"} Dec 04 10:08:15 crc kubenswrapper[4707]: I1204 10:08:15.441204 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clscm" event={"ID":"ed35aba4-eab4-46de-8b85-03f41d79c22f","Type":"ContainerStarted","Data":"65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa"} Dec 04 10:08:15 crc kubenswrapper[4707]: I1204 10:08:15.845064 4707 scope.go:117] "RemoveContainer" 
containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:08:15 crc kubenswrapper[4707]: E1204 10:08:15.845285 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:08:16 crc kubenswrapper[4707]: I1204 10:08:16.449921 4707 generic.go:334] "Generic (PLEG): container finished" podID="ed35aba4-eab4-46de-8b85-03f41d79c22f" containerID="65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa" exitCode=0 Dec 04 10:08:16 crc kubenswrapper[4707]: I1204 10:08:16.450029 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clscm" event={"ID":"ed35aba4-eab4-46de-8b85-03f41d79c22f","Type":"ContainerDied","Data":"65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa"} Dec 04 10:08:16 crc kubenswrapper[4707]: I1204 10:08:16.452093 4707 generic.go:334] "Generic (PLEG): container finished" podID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" containerID="ccace9e60ed2dbc032217b7952e8fc2923fbcba2dcf79cece724499a36dc950f" exitCode=0 Dec 04 10:08:16 crc kubenswrapper[4707]: I1204 10:08:16.452131 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvcwx" event={"ID":"c5f08d69-168a-4fa0-86b3-ffd17de510b8","Type":"ContainerDied","Data":"ccace9e60ed2dbc032217b7952e8fc2923fbcba2dcf79cece724499a36dc950f"} Dec 04 10:08:17 crc kubenswrapper[4707]: I1204 10:08:17.461489 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clscm" event={"ID":"ed35aba4-eab4-46de-8b85-03f41d79c22f","Type":"ContainerStarted","Data":"c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6"} Dec 04 10:08:17 crc kubenswrapper[4707]: I1204 10:08:17.463749 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvcwx" event={"ID":"c5f08d69-168a-4fa0-86b3-ffd17de510b8","Type":"ContainerStarted","Data":"3c17dd0ff7f7200e563ef9bfe0468f02aa6502bc959b327b7a99a87d06661b9d"} Dec 04 10:08:17 crc kubenswrapper[4707]: I1204 10:08:17.481742 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-clscm" podStartSLOduration=2.070513224 podStartE2EDuration="4.481723349s" podCreationTimestamp="2025-12-04 10:08:13 +0000 UTC" firstStartedPulling="2025-12-04 10:08:14.4300441 +0000 UTC m=+1793.865866607" lastFinishedPulling="2025-12-04 10:08:16.841254225 +0000 UTC m=+1796.277076732" observedRunningTime="2025-12-04 10:08:17.478682142 +0000 UTC m=+1796.914504649" watchObservedRunningTime="2025-12-04 10:08:17.481723349 +0000 UTC m=+1796.917545856" Dec 04 10:08:17 crc kubenswrapper[4707]: I1204 10:08:17.504291 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fvcwx" podStartSLOduration=2.119212302 podStartE2EDuration="4.504272946s" podCreationTimestamp="2025-12-04 10:08:13 +0000 UTC" firstStartedPulling="2025-12-04 10:08:14.429565234 +0000 UTC m=+1793.865387741" lastFinishedPulling="2025-12-04 10:08:16.814625868 +0000 UTC m=+1796.250448385" observedRunningTime="2025-12-04 10:08:17.501229959 +0000 UTC m=+1796.937052476" 
watchObservedRunningTime="2025-12-04 10:08:17.504272946 +0000 UTC m=+1796.940095453" Dec 04 10:08:23 crc kubenswrapper[4707]: I1204 10:08:23.090844 4707 scope.go:117] "RemoveContainer" containerID="64437ccf773361794da58f159508235121e29babaf40ef7b2e36b5da3630e44e" Dec 04 10:08:23 crc kubenswrapper[4707]: I1204 10:08:23.127390 4707 scope.go:117] "RemoveContainer" containerID="35e0af0eb78012c1f7891bf45420b54a68b82b5f86899841ddbdee92719e657b" Dec 04 10:08:23 crc kubenswrapper[4707]: I1204 10:08:23.143891 4707 scope.go:117] "RemoveContainer" containerID="a97f7903c46e1e76f61f3c621500d5c28879e1f2d888c3381efdfe5c22d74606" Dec 04 10:08:23 crc kubenswrapper[4707]: I1204 10:08:23.166262 4707 scope.go:117] "RemoveContainer" containerID="20f047cf685c38373adeb4fc55ec060340fded713d5daea02ff6d7d9b754af16" Dec 04 10:08:23 crc kubenswrapper[4707]: I1204 10:08:23.559461 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:23 crc kubenswrapper[4707]: I1204 10:08:23.559501 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:23 crc kubenswrapper[4707]: I1204 10:08:23.599319 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:23 crc kubenswrapper[4707]: I1204 10:08:23.753495 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:23 crc kubenswrapper[4707]: I1204 10:08:23.753726 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:23 crc kubenswrapper[4707]: I1204 10:08:23.788807 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:24 crc kubenswrapper[4707]: I1204 10:08:24.542884 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:24 crc kubenswrapper[4707]: I1204 10:08:24.546549 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:25 crc kubenswrapper[4707]: I1204 10:08:25.027722 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fvcwx"] Dec 04 10:08:26 crc kubenswrapper[4707]: I1204 10:08:26.520364 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fvcwx" podUID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" containerName="registry-server" containerID="cri-o://3c17dd0ff7f7200e563ef9bfe0468f02aa6502bc959b327b7a99a87d06661b9d" gracePeriod=2 Dec 04 10:08:26 crc kubenswrapper[4707]: I1204 10:08:26.828127 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-clscm"] Dec 04 10:08:26 crc kubenswrapper[4707]: I1204 10:08:26.828416 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-clscm" podUID="ed35aba4-eab4-46de-8b85-03f41d79c22f" containerName="registry-server" containerID="cri-o://c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6" gracePeriod=2 Dec 04 10:08:26 crc kubenswrapper[4707]: I1204 10:08:26.845531 4707 scope.go:117] "RemoveContainer" 
containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:08:26 crc kubenswrapper[4707]: E1204 10:08:26.845779 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.243727 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.386208 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-utilities\") pod \"ed35aba4-eab4-46de-8b85-03f41d79c22f\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.386283 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j55wm\" (UniqueName: \"kubernetes.io/projected/ed35aba4-eab4-46de-8b85-03f41d79c22f-kube-api-access-j55wm\") pod \"ed35aba4-eab4-46de-8b85-03f41d79c22f\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.386434 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-catalog-content\") pod \"ed35aba4-eab4-46de-8b85-03f41d79c22f\" (UID: \"ed35aba4-eab4-46de-8b85-03f41d79c22f\") " Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.387722 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-utilities" (OuterVolumeSpecName: "utilities") pod "ed35aba4-eab4-46de-8b85-03f41d79c22f" (UID: "ed35aba4-eab4-46de-8b85-03f41d79c22f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.393223 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed35aba4-eab4-46de-8b85-03f41d79c22f-kube-api-access-j55wm" (OuterVolumeSpecName: "kube-api-access-j55wm") pod "ed35aba4-eab4-46de-8b85-03f41d79c22f" (UID: "ed35aba4-eab4-46de-8b85-03f41d79c22f"). InnerVolumeSpecName "kube-api-access-j55wm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.487634 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.487670 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j55wm\" (UniqueName: \"kubernetes.io/projected/ed35aba4-eab4-46de-8b85-03f41d79c22f-kube-api-access-j55wm\") on node \"crc\" DevicePath \"\"" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.502236 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed35aba4-eab4-46de-8b85-03f41d79c22f" (UID: "ed35aba4-eab4-46de-8b85-03f41d79c22f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.532996 4707 generic.go:334] "Generic (PLEG): container finished" podID="ed35aba4-eab4-46de-8b85-03f41d79c22f" containerID="c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6" exitCode=0 Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.533152 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clscm" event={"ID":"ed35aba4-eab4-46de-8b85-03f41d79c22f","Type":"ContainerDied","Data":"c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6"} Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.533178 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clscm" event={"ID":"ed35aba4-eab4-46de-8b85-03f41d79c22f","Type":"ContainerDied","Data":"64458f543dbc948786738bfbcd0367b177a8e3f8af5535a0d463c18455bd00e6"} Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.533194 4707 scope.go:117] "RemoveContainer" containerID="c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.533314 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-clscm" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.538966 4707 generic.go:334] "Generic (PLEG): container finished" podID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" containerID="3c17dd0ff7f7200e563ef9bfe0468f02aa6502bc959b327b7a99a87d06661b9d" exitCode=0 Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.539001 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvcwx" event={"ID":"c5f08d69-168a-4fa0-86b3-ffd17de510b8","Type":"ContainerDied","Data":"3c17dd0ff7f7200e563ef9bfe0468f02aa6502bc959b327b7a99a87d06661b9d"} Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.551994 4707 scope.go:117] "RemoveContainer" containerID="65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.562119 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-clscm"] Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.568395 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-clscm"] Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.588866 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed35aba4-eab4-46de-8b85-03f41d79c22f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.597198 4707 scope.go:117] "RemoveContainer" containerID="88cef275e82bbbc8c1415568f2638605e70be0f38ba3cbde0987d4be447d9866" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.612321 4707 scope.go:117] "RemoveContainer" containerID="c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6" Dec 04 10:08:28 crc kubenswrapper[4707]: E1204 10:08:28.613048 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6\": container with ID starting with c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6 not found: ID does not exist" containerID="c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.613076 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6"} err="failed to get container status \"c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6\": rpc error: code = NotFound desc = could not find container \"c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6\": container with ID starting with c54de8d41087d7fccd019fec56bcaccb46656d32323f8870997c881b1a557df6 not found: ID does not exist" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.613096 4707 scope.go:117] "RemoveContainer" containerID="65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa" Dec 04 10:08:28 crc kubenswrapper[4707]: E1204 10:08:28.613470 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa\": container with ID starting with 65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa not found: ID does not exist" containerID="65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 
10:08:28.613496 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa"} err="failed to get container status \"65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa\": rpc error: code = NotFound desc = could not find container \"65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa\": container with ID starting with 65b351d910c0a585f249e2b08b1ae5ffa57b8975df9e461ef7ed584e8c2008fa not found: ID does not exist" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.613510 4707 scope.go:117] "RemoveContainer" containerID="88cef275e82bbbc8c1415568f2638605e70be0f38ba3cbde0987d4be447d9866" Dec 04 10:08:28 crc kubenswrapper[4707]: E1204 10:08:28.613732 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88cef275e82bbbc8c1415568f2638605e70be0f38ba3cbde0987d4be447d9866\": container with ID starting with 88cef275e82bbbc8c1415568f2638605e70be0f38ba3cbde0987d4be447d9866 not found: ID does not exist" containerID="88cef275e82bbbc8c1415568f2638605e70be0f38ba3cbde0987d4be447d9866" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.613753 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88cef275e82bbbc8c1415568f2638605e70be0f38ba3cbde0987d4be447d9866"} err="failed to get container status \"88cef275e82bbbc8c1415568f2638605e70be0f38ba3cbde0987d4be447d9866\": rpc error: code = NotFound desc = could not find container \"88cef275e82bbbc8c1415568f2638605e70be0f38ba3cbde0987d4be447d9866\": container with ID starting with 88cef275e82bbbc8c1415568f2638605e70be0f38ba3cbde0987d4be447d9866 not found: ID does not exist" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.635087 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.790686 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sc669\" (UniqueName: \"kubernetes.io/projected/c5f08d69-168a-4fa0-86b3-ffd17de510b8-kube-api-access-sc669\") pod \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.790800 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-utilities\") pod \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.790854 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-catalog-content\") pod \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\" (UID: \"c5f08d69-168a-4fa0-86b3-ffd17de510b8\") " Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.792247 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-utilities" (OuterVolumeSpecName: "utilities") pod "c5f08d69-168a-4fa0-86b3-ffd17de510b8" (UID: "c5f08d69-168a-4fa0-86b3-ffd17de510b8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.793820 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5f08d69-168a-4fa0-86b3-ffd17de510b8-kube-api-access-sc669" (OuterVolumeSpecName: "kube-api-access-sc669") pod "c5f08d69-168a-4fa0-86b3-ffd17de510b8" (UID: "c5f08d69-168a-4fa0-86b3-ffd17de510b8"). InnerVolumeSpecName "kube-api-access-sc669". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.844407 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5f08d69-168a-4fa0-86b3-ffd17de510b8" (UID: "c5f08d69-168a-4fa0-86b3-ffd17de510b8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.851965 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed35aba4-eab4-46de-8b85-03f41d79c22f" path="/var/lib/kubelet/pods/ed35aba4-eab4-46de-8b85-03f41d79c22f/volumes" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.892461 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sc669\" (UniqueName: \"kubernetes.io/projected/c5f08d69-168a-4fa0-86b3-ffd17de510b8-kube-api-access-sc669\") on node \"crc\" DevicePath \"\"" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.892546 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 10:08:28 crc kubenswrapper[4707]: I1204 10:08:28.892561 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5f08d69-168a-4fa0-86b3-ffd17de510b8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 10:08:29 crc kubenswrapper[4707]: I1204 10:08:29.547725 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fvcwx" event={"ID":"c5f08d69-168a-4fa0-86b3-ffd17de510b8","Type":"ContainerDied","Data":"8a594bada8a8cd7f58e05612fa4d1dd9cf29b39a426c0aa725ae559d4e4bc80e"} Dec 04 10:08:29 crc kubenswrapper[4707]: I1204 10:08:29.547766 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fvcwx" Dec 04 10:08:29 crc kubenswrapper[4707]: I1204 10:08:29.547784 4707 scope.go:117] "RemoveContainer" containerID="3c17dd0ff7f7200e563ef9bfe0468f02aa6502bc959b327b7a99a87d06661b9d" Dec 04 10:08:29 crc kubenswrapper[4707]: I1204 10:08:29.564308 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fvcwx"] Dec 04 10:08:29 crc kubenswrapper[4707]: I1204 10:08:29.566263 4707 scope.go:117] "RemoveContainer" containerID="ccace9e60ed2dbc032217b7952e8fc2923fbcba2dcf79cece724499a36dc950f" Dec 04 10:08:29 crc kubenswrapper[4707]: I1204 10:08:29.568830 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fvcwx"] Dec 04 10:08:29 crc kubenswrapper[4707]: I1204 10:08:29.579830 4707 scope.go:117] "RemoveContainer" containerID="abd0daa1ef87de4d274f2b69309b41db6f50dc336bc5554246e1e083a588fdc4" Dec 04 10:08:30 crc kubenswrapper[4707]: I1204 10:08:30.852584 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" path="/var/lib/kubelet/pods/c5f08d69-168a-4fa0-86b3-ffd17de510b8/volumes" Dec 04 10:08:39 crc kubenswrapper[4707]: I1204 10:08:39.845646 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:08:39 crc kubenswrapper[4707]: E1204 10:08:39.846830 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:08:50 crc kubenswrapper[4707]: I1204 10:08:50.851056 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:08:50 crc kubenswrapper[4707]: E1204 10:08:50.851874 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:09:04 crc kubenswrapper[4707]: I1204 10:09:04.845371 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:09:04 crc kubenswrapper[4707]: E1204 10:09:04.847826 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:09:17 crc kubenswrapper[4707]: I1204 10:09:17.844657 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:09:17 crc kubenswrapper[4707]: E1204 10:09:17.845386 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:09:23 crc kubenswrapper[4707]: I1204 10:09:23.233939 4707 scope.go:117] "RemoveContainer" containerID="a4d03d85ae7d1a62b7d71861cd080c56ffe7eb4fe75ebb078c52e903ebe0b1a6" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.769194 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mtcjd/must-gather-r2wlr"] Dec 04 10:09:29 crc kubenswrapper[4707]: E1204 10:09:29.770032 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" containerName="extract-utilities" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.770050 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" containerName="extract-utilities" Dec 04 10:09:29 crc kubenswrapper[4707]: E1204 10:09:29.770073 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed35aba4-eab4-46de-8b85-03f41d79c22f" containerName="extract-utilities" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.770081 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed35aba4-eab4-46de-8b85-03f41d79c22f" containerName="extract-utilities" Dec 04 10:09:29 crc kubenswrapper[4707]: E1204 10:09:29.770096 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" containerName="registry-server" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.770106 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" containerName="registry-server" Dec 04 10:09:29 crc kubenswrapper[4707]: E1204 10:09:29.770118 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" containerName="extract-content" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.770125 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" containerName="extract-content" Dec 04 10:09:29 crc kubenswrapper[4707]: E1204 10:09:29.770136 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed35aba4-eab4-46de-8b85-03f41d79c22f" containerName="registry-server" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.770160 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed35aba4-eab4-46de-8b85-03f41d79c22f" containerName="registry-server" Dec 04 10:09:29 crc kubenswrapper[4707]: E1204 10:09:29.770174 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed35aba4-eab4-46de-8b85-03f41d79c22f" containerName="extract-content" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.770183 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed35aba4-eab4-46de-8b85-03f41d79c22f" containerName="extract-content" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.770306 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed35aba4-eab4-46de-8b85-03f41d79c22f" containerName="registry-server" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.770352 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5f08d69-168a-4fa0-86b3-ffd17de510b8" containerName="registry-server" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 
10:09:29.771038 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mtcjd/must-gather-r2wlr" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.774674 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mtcjd"/"openshift-service-ca.crt" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.774925 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mtcjd"/"kube-root-ca.crt" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.786006 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mtcjd/must-gather-r2wlr"] Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.897411 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vldtr\" (UniqueName: \"kubernetes.io/projected/6524269a-646b-4df0-8cb1-26b909e8dd13-kube-api-access-vldtr\") pod \"must-gather-r2wlr\" (UID: \"6524269a-646b-4df0-8cb1-26b909e8dd13\") " pod="openshift-must-gather-mtcjd/must-gather-r2wlr" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.897478 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6524269a-646b-4df0-8cb1-26b909e8dd13-must-gather-output\") pod \"must-gather-r2wlr\" (UID: \"6524269a-646b-4df0-8cb1-26b909e8dd13\") " pod="openshift-must-gather-mtcjd/must-gather-r2wlr" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.999425 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6524269a-646b-4df0-8cb1-26b909e8dd13-must-gather-output\") pod \"must-gather-r2wlr\" (UID: \"6524269a-646b-4df0-8cb1-26b909e8dd13\") " pod="openshift-must-gather-mtcjd/must-gather-r2wlr" Dec 04 10:09:29 crc kubenswrapper[4707]: I1204 10:09:29.999575 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vldtr\" (UniqueName: \"kubernetes.io/projected/6524269a-646b-4df0-8cb1-26b909e8dd13-kube-api-access-vldtr\") pod \"must-gather-r2wlr\" (UID: \"6524269a-646b-4df0-8cb1-26b909e8dd13\") " pod="openshift-must-gather-mtcjd/must-gather-r2wlr" Dec 04 10:09:30 crc kubenswrapper[4707]: I1204 10:09:29.999993 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6524269a-646b-4df0-8cb1-26b909e8dd13-must-gather-output\") pod \"must-gather-r2wlr\" (UID: \"6524269a-646b-4df0-8cb1-26b909e8dd13\") " pod="openshift-must-gather-mtcjd/must-gather-r2wlr" Dec 04 10:09:30 crc kubenswrapper[4707]: I1204 10:09:30.018866 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vldtr\" (UniqueName: \"kubernetes.io/projected/6524269a-646b-4df0-8cb1-26b909e8dd13-kube-api-access-vldtr\") pod \"must-gather-r2wlr\" (UID: \"6524269a-646b-4df0-8cb1-26b909e8dd13\") " pod="openshift-must-gather-mtcjd/must-gather-r2wlr" Dec 04 10:09:30 crc kubenswrapper[4707]: I1204 10:09:30.086608 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mtcjd/must-gather-r2wlr" Dec 04 10:09:30 crc kubenswrapper[4707]: I1204 10:09:30.288096 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mtcjd/must-gather-r2wlr"] Dec 04 10:09:31 crc kubenswrapper[4707]: I1204 10:09:31.084255 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mtcjd/must-gather-r2wlr" event={"ID":"6524269a-646b-4df0-8cb1-26b909e8dd13","Type":"ContainerStarted","Data":"9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36"} Dec 04 10:09:31 crc kubenswrapper[4707]: I1204 10:09:31.084624 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mtcjd/must-gather-r2wlr" event={"ID":"6524269a-646b-4df0-8cb1-26b909e8dd13","Type":"ContainerStarted","Data":"1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6"} Dec 04 10:09:31 crc kubenswrapper[4707]: I1204 10:09:31.084644 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mtcjd/must-gather-r2wlr" event={"ID":"6524269a-646b-4df0-8cb1-26b909e8dd13","Type":"ContainerStarted","Data":"c9e5bc4c8a09c59ceaf02104d32395ad569e8ea459e11cafa60a196572cb0e07"} Dec 04 10:09:31 crc kubenswrapper[4707]: I1204 10:09:31.100538 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mtcjd/must-gather-r2wlr" podStartSLOduration=2.100516416 podStartE2EDuration="2.100516416s" podCreationTimestamp="2025-12-04 10:09:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:09:31.097017685 +0000 UTC m=+1870.532840202" watchObservedRunningTime="2025-12-04 10:09:31.100516416 +0000 UTC m=+1870.536338923" Dec 04 10:09:32 crc kubenswrapper[4707]: I1204 10:09:32.845329 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:09:32 crc kubenswrapper[4707]: E1204 10:09:32.845853 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:09:47 crc kubenswrapper[4707]: I1204 10:09:47.845178 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:09:47 crc kubenswrapper[4707]: E1204 10:09:47.845979 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:09:59 crc kubenswrapper[4707]: I1204 10:09:59.845191 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:09:59 crc kubenswrapper[4707]: E1204 10:09:59.845761 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:10:10 crc kubenswrapper[4707]: I1204 10:10:10.012169 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-dwmkj_7528abe2-fb27-4c14-88c6-98fcbb716395/control-plane-machine-set-operator/0.log" Dec 04 10:10:10 crc kubenswrapper[4707]: I1204 10:10:10.124719 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-22vzf_5a08fa03-e041-425a-b5e8-05300cdac87b/kube-rbac-proxy/0.log" Dec 04 10:10:10 crc kubenswrapper[4707]: I1204 10:10:10.142952 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-22vzf_5a08fa03-e041-425a-b5e8-05300cdac87b/machine-api-operator/0.log" Dec 04 10:10:13 crc kubenswrapper[4707]: I1204 10:10:13.845537 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:10:13 crc kubenswrapper[4707]: E1204 10:10:13.845812 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:10:23 crc kubenswrapper[4707]: I1204 10:10:23.901737 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-kv6jh_058a219f-7ca4-486e-87e2-7406ad069250/controller/0.log" Dec 04 10:10:23 crc kubenswrapper[4707]: I1204 10:10:23.906257 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-kv6jh_058a219f-7ca4-486e-87e2-7406ad069250/kube-rbac-proxy/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.057051 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-frr-files/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.224165 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-frr-files/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.233863 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-metrics/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.246161 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-reloader/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.271242 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-reloader/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.419579 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-reloader/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.425761 4707 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-metrics/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.444849 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-frr-files/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.481494 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-metrics/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.603136 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-frr-files/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.622700 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-metrics/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.627300 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/controller/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.629271 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/cp-reloader/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.795135 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/frr-metrics/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.827400 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/kube-rbac-proxy/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.835066 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/kube-rbac-proxy-frr/0.log" Dec 04 10:10:24 crc kubenswrapper[4707]: I1204 10:10:24.848433 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:10:24 crc kubenswrapper[4707]: E1204 10:10:24.848642 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:10:25 crc kubenswrapper[4707]: I1204 10:10:25.002097 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/reloader/0.log" Dec 04 10:10:25 crc kubenswrapper[4707]: I1204 10:10:25.017876 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-vxncn_31fd648a-f639-45c0-a30c-77afc9cafedc/frr-k8s-webhook-server/0.log" Dec 04 10:10:25 crc kubenswrapper[4707]: I1204 10:10:25.217897 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5657775d6b-zhwls_41bddb2a-8c9f-42a0-a450-06a9e755c211/manager/0.log" Dec 04 10:10:25 crc kubenswrapper[4707]: I1204 10:10:25.232765 4707 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-r9wp9_71b37f42-f459-4829-8112-3db6b09fc06d/frr/0.log" Dec 04 10:10:25 crc kubenswrapper[4707]: I1204 10:10:25.375945 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-fcf969487-8d9dx_6a9e4f53-c751-4994-8e44-6bcc07b40dc8/webhook-server/0.log" Dec 04 10:10:25 crc kubenswrapper[4707]: I1204 10:10:25.405830 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-599tz_52f06bc6-db65-4283-961c-3bee70be7363/kube-rbac-proxy/0.log" Dec 04 10:10:25 crc kubenswrapper[4707]: I1204 10:10:25.587951 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-599tz_52f06bc6-db65-4283-961c-3bee70be7363/speaker/0.log" Dec 04 10:10:39 crc kubenswrapper[4707]: I1204 10:10:39.845229 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:10:39 crc kubenswrapper[4707]: E1204 10:10:39.845957 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:10:47 crc kubenswrapper[4707]: I1204 10:10:47.610507 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/util/0.log" Dec 04 10:10:47 crc kubenswrapper[4707]: I1204 10:10:47.720512 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/util/0.log" Dec 04 10:10:47 crc kubenswrapper[4707]: I1204 10:10:47.737842 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/pull/0.log" Dec 04 10:10:47 crc kubenswrapper[4707]: I1204 10:10:47.767433 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/pull/0.log" Dec 04 10:10:47 crc kubenswrapper[4707]: I1204 10:10:47.924255 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/util/0.log" Dec 04 10:10:47 crc kubenswrapper[4707]: I1204 10:10:47.958131 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/extract/0.log" Dec 04 10:10:47 crc kubenswrapper[4707]: I1204 10:10:47.966688 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839xfxz_1b113f24-11b8-4720-87a6-ccae8b3f888e/pull/0.log" Dec 04 10:10:48 crc kubenswrapper[4707]: I1204 10:10:48.089469 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-utilities/0.log" Dec 04 10:10:48 crc 
kubenswrapper[4707]: I1204 10:10:48.304802 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-content/0.log" Dec 04 10:10:48 crc kubenswrapper[4707]: I1204 10:10:48.306739 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-utilities/0.log" Dec 04 10:10:48 crc kubenswrapper[4707]: I1204 10:10:48.356835 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-content/0.log" Dec 04 10:10:48 crc kubenswrapper[4707]: I1204 10:10:48.455740 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-content/0.log" Dec 04 10:10:48 crc kubenswrapper[4707]: I1204 10:10:48.475823 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/extract-utilities/0.log" Dec 04 10:10:48 crc kubenswrapper[4707]: I1204 10:10:48.771606 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-utilities/0.log" Dec 04 10:10:48 crc kubenswrapper[4707]: I1204 10:10:48.899039 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-flfxl_ae1d3840-1144-4905-8415-8817aa67d299/registry-server/0.log" Dec 04 10:10:48 crc kubenswrapper[4707]: I1204 10:10:48.965602 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-content/0.log" Dec 04 10:10:48 crc kubenswrapper[4707]: I1204 10:10:48.988051 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-utilities/0.log" Dec 04 10:10:49 crc kubenswrapper[4707]: I1204 10:10:49.006497 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-content/0.log" Dec 04 10:10:49 crc kubenswrapper[4707]: I1204 10:10:49.168288 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-content/0.log" Dec 04 10:10:49 crc kubenswrapper[4707]: I1204 10:10:49.169790 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/extract-utilities/0.log" Dec 04 10:10:49 crc kubenswrapper[4707]: I1204 10:10:49.522568 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8qmqt_860836b6-d7c9-4c56-9193-c4bbaeca659b/marketplace-operator/0.log" Dec 04 10:10:49 crc kubenswrapper[4707]: I1204 10:10:49.619738 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-utilities/0.log" Dec 04 10:10:49 crc kubenswrapper[4707]: I1204 10:10:49.799184 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xbrw2_ad0fabbb-7625-4520-8298-8379635bb03c/registry-server/0.log" Dec 04 10:10:49 crc 
kubenswrapper[4707]: I1204 10:10:49.812363 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-utilities/0.log" Dec 04 10:10:49 crc kubenswrapper[4707]: I1204 10:10:49.846474 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-content/0.log" Dec 04 10:10:49 crc kubenswrapper[4707]: I1204 10:10:49.947142 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-content/0.log" Dec 04 10:10:50 crc kubenswrapper[4707]: I1204 10:10:50.053201 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-utilities/0.log" Dec 04 10:10:50 crc kubenswrapper[4707]: I1204 10:10:50.058984 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/extract-content/0.log" Dec 04 10:10:50 crc kubenswrapper[4707]: I1204 10:10:50.138958 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ngcrl_2a03b094-f535-4b09-9bef-016450d98586/registry-server/0.log" Dec 04 10:10:50 crc kubenswrapper[4707]: I1204 10:10:50.240521 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-utilities/0.log" Dec 04 10:10:50 crc kubenswrapper[4707]: I1204 10:10:50.458862 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-utilities/0.log" Dec 04 10:10:50 crc kubenswrapper[4707]: I1204 10:10:50.459449 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-content/0.log" Dec 04 10:10:50 crc kubenswrapper[4707]: I1204 10:10:50.461999 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-content/0.log" Dec 04 10:10:50 crc kubenswrapper[4707]: I1204 10:10:50.626279 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-content/0.log" Dec 04 10:10:50 crc kubenswrapper[4707]: I1204 10:10:50.662915 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/extract-utilities/0.log" Dec 04 10:10:50 crc kubenswrapper[4707]: I1204 10:10:50.848204 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:10:50 crc kubenswrapper[4707]: E1204 10:10:50.848508 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:10:51 crc kubenswrapper[4707]: I1204 10:10:51.097807 4707 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcrk2_5ff4a232-1027-4cb9-a021-9b320f41b041/registry-server/0.log" Dec 04 10:11:05 crc kubenswrapper[4707]: I1204 10:11:05.844827 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:11:05 crc kubenswrapper[4707]: E1204 10:11:05.845735 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:11:20 crc kubenswrapper[4707]: I1204 10:11:20.847915 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:11:20 crc kubenswrapper[4707]: E1204 10:11:20.848755 4707 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c244z_openshift-machine-config-operator(e64897e0-4162-4aa8-9c13-8a4262a3ca3d)\"" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" Dec 04 10:11:33 crc kubenswrapper[4707]: I1204 10:11:33.845033 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:11:34 crc kubenswrapper[4707]: I1204 10:11:34.890466 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"0b78a3d88e45e4d2a23e11ada15e5473970a0d8a8427c810fea0b139ad97f1e1"} Dec 04 10:12:02 crc kubenswrapper[4707]: I1204 10:12:02.042063 4707 generic.go:334] "Generic (PLEG): container finished" podID="6524269a-646b-4df0-8cb1-26b909e8dd13" containerID="1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6" exitCode=0 Dec 04 10:12:02 crc kubenswrapper[4707]: I1204 10:12:02.042328 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mtcjd/must-gather-r2wlr" event={"ID":"6524269a-646b-4df0-8cb1-26b909e8dd13","Type":"ContainerDied","Data":"1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6"} Dec 04 10:12:02 crc kubenswrapper[4707]: I1204 10:12:02.042994 4707 scope.go:117] "RemoveContainer" containerID="1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6" Dec 04 10:12:03 crc kubenswrapper[4707]: I1204 10:12:03.056177 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mtcjd_must-gather-r2wlr_6524269a-646b-4df0-8cb1-26b909e8dd13/gather/0.log" Dec 04 10:12:12 crc kubenswrapper[4707]: I1204 10:12:12.258168 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mtcjd/must-gather-r2wlr"] Dec 04 10:12:12 crc kubenswrapper[4707]: I1204 10:12:12.258945 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-mtcjd/must-gather-r2wlr" podUID="6524269a-646b-4df0-8cb1-26b909e8dd13" containerName="copy" containerID="cri-o://9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36" gracePeriod=2 Dec 04 10:12:12 crc kubenswrapper[4707]: I1204 
10:12:12.262706 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mtcjd/must-gather-r2wlr"] Dec 04 10:12:12 crc kubenswrapper[4707]: I1204 10:12:12.751162 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mtcjd_must-gather-r2wlr_6524269a-646b-4df0-8cb1-26b909e8dd13/copy/0.log" Dec 04 10:12:12 crc kubenswrapper[4707]: I1204 10:12:12.752562 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mtcjd/must-gather-r2wlr" Dec 04 10:12:12 crc kubenswrapper[4707]: I1204 10:12:12.835906 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vldtr\" (UniqueName: \"kubernetes.io/projected/6524269a-646b-4df0-8cb1-26b909e8dd13-kube-api-access-vldtr\") pod \"6524269a-646b-4df0-8cb1-26b909e8dd13\" (UID: \"6524269a-646b-4df0-8cb1-26b909e8dd13\") " Dec 04 10:12:12 crc kubenswrapper[4707]: I1204 10:12:12.835988 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6524269a-646b-4df0-8cb1-26b909e8dd13-must-gather-output\") pod \"6524269a-646b-4df0-8cb1-26b909e8dd13\" (UID: \"6524269a-646b-4df0-8cb1-26b909e8dd13\") " Dec 04 10:12:12 crc kubenswrapper[4707]: I1204 10:12:12.858112 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6524269a-646b-4df0-8cb1-26b909e8dd13-kube-api-access-vldtr" (OuterVolumeSpecName: "kube-api-access-vldtr") pod "6524269a-646b-4df0-8cb1-26b909e8dd13" (UID: "6524269a-646b-4df0-8cb1-26b909e8dd13"). InnerVolumeSpecName "kube-api-access-vldtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:12:12 crc kubenswrapper[4707]: I1204 10:12:12.917154 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6524269a-646b-4df0-8cb1-26b909e8dd13-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "6524269a-646b-4df0-8cb1-26b909e8dd13" (UID: "6524269a-646b-4df0-8cb1-26b909e8dd13"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:12:12 crc kubenswrapper[4707]: I1204 10:12:12.937085 4707 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6524269a-646b-4df0-8cb1-26b909e8dd13-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 04 10:12:12 crc kubenswrapper[4707]: I1204 10:12:12.937114 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vldtr\" (UniqueName: \"kubernetes.io/projected/6524269a-646b-4df0-8cb1-26b909e8dd13-kube-api-access-vldtr\") on node \"crc\" DevicePath \"\"" Dec 04 10:12:13 crc kubenswrapper[4707]: I1204 10:12:13.117267 4707 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mtcjd_must-gather-r2wlr_6524269a-646b-4df0-8cb1-26b909e8dd13/copy/0.log" Dec 04 10:12:13 crc kubenswrapper[4707]: I1204 10:12:13.118023 4707 generic.go:334] "Generic (PLEG): container finished" podID="6524269a-646b-4df0-8cb1-26b909e8dd13" containerID="9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36" exitCode=143 Dec 04 10:12:13 crc kubenswrapper[4707]: I1204 10:12:13.118087 4707 scope.go:117] "RemoveContainer" containerID="9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36" Dec 04 10:12:13 crc kubenswrapper[4707]: I1204 10:12:13.118162 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mtcjd/must-gather-r2wlr" Dec 04 10:12:13 crc kubenswrapper[4707]: I1204 10:12:13.155271 4707 scope.go:117] "RemoveContainer" containerID="1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6" Dec 04 10:12:13 crc kubenswrapper[4707]: I1204 10:12:13.198034 4707 scope.go:117] "RemoveContainer" containerID="9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36" Dec 04 10:12:13 crc kubenswrapper[4707]: E1204 10:12:13.198994 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36\": container with ID starting with 9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36 not found: ID does not exist" containerID="9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36" Dec 04 10:12:13 crc kubenswrapper[4707]: I1204 10:12:13.199029 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36"} err="failed to get container status \"9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36\": rpc error: code = NotFound desc = could not find container \"9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36\": container with ID starting with 9cf0393c6959eb14ed99e5a42d1d31c807bcd8f7de81e5e503f814955df95f36 not found: ID does not exist" Dec 04 10:12:13 crc kubenswrapper[4707]: I1204 10:12:13.199049 4707 scope.go:117] "RemoveContainer" containerID="1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6" Dec 04 10:12:13 crc kubenswrapper[4707]: E1204 10:12:13.199464 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6\": container with ID starting with 1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6 not found: ID does not exist" containerID="1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6" Dec 04 10:12:13 crc kubenswrapper[4707]: I1204 10:12:13.199481 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6"} err="failed to get container status \"1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6\": rpc error: code = NotFound desc = could not find container \"1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6\": container with ID starting with 1c0e5aadd8af33df7534849b1691408c3b2f12c89faa3049f4df68f44c12ffb6 not found: ID does not exist" Dec 04 10:12:14 crc kubenswrapper[4707]: I1204 10:12:14.851160 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6524269a-646b-4df0-8cb1-26b909e8dd13" path="/var/lib/kubelet/pods/6524269a-646b-4df0-8cb1-26b909e8dd13/volumes" Dec 04 10:14:00 crc kubenswrapper[4707]: I1204 10:14:00.817102 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 10:14:00 crc kubenswrapper[4707]: I1204 10:14:00.818760 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" 
podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 10:14:30 crc kubenswrapper[4707]: I1204 10:14:30.817799 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 10:14:30 crc kubenswrapper[4707]: I1204 10:14:30.818327 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.153435 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8"] Dec 04 10:15:00 crc kubenswrapper[4707]: E1204 10:15:00.154239 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6524269a-646b-4df0-8cb1-26b909e8dd13" containerName="gather" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.154254 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="6524269a-646b-4df0-8cb1-26b909e8dd13" containerName="gather" Dec 04 10:15:00 crc kubenswrapper[4707]: E1204 10:15:00.154288 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6524269a-646b-4df0-8cb1-26b909e8dd13" containerName="copy" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.154294 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="6524269a-646b-4df0-8cb1-26b909e8dd13" containerName="copy" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.154430 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="6524269a-646b-4df0-8cb1-26b909e8dd13" containerName="gather" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.154443 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="6524269a-646b-4df0-8cb1-26b909e8dd13" containerName="copy" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.154789 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.157376 4707 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.160077 4707 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.165480 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8"] Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.307982 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v54q\" (UniqueName: \"kubernetes.io/projected/dc0d3be4-db60-4d28-ba62-debad8df513b-kube-api-access-8v54q\") pod \"collect-profiles-29414055-xb2r8\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.308075 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dc0d3be4-db60-4d28-ba62-debad8df513b-config-volume\") pod \"collect-profiles-29414055-xb2r8\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.308202 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dc0d3be4-db60-4d28-ba62-debad8df513b-secret-volume\") pod \"collect-profiles-29414055-xb2r8\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.409737 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v54q\" (UniqueName: \"kubernetes.io/projected/dc0d3be4-db60-4d28-ba62-debad8df513b-kube-api-access-8v54q\") pod \"collect-profiles-29414055-xb2r8\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.409803 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dc0d3be4-db60-4d28-ba62-debad8df513b-config-volume\") pod \"collect-profiles-29414055-xb2r8\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.409827 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dc0d3be4-db60-4d28-ba62-debad8df513b-secret-volume\") pod \"collect-profiles-29414055-xb2r8\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.411356 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dc0d3be4-db60-4d28-ba62-debad8df513b-config-volume\") pod 
\"collect-profiles-29414055-xb2r8\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.416639 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dc0d3be4-db60-4d28-ba62-debad8df513b-secret-volume\") pod \"collect-profiles-29414055-xb2r8\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.441549 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v54q\" (UniqueName: \"kubernetes.io/projected/dc0d3be4-db60-4d28-ba62-debad8df513b-kube-api-access-8v54q\") pod \"collect-profiles-29414055-xb2r8\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.474075 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.817247 4707 patch_prober.go:28] interesting pod/machine-config-daemon-c244z container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.817306 4707 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.817363 4707 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c244z" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.818059 4707 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0b78a3d88e45e4d2a23e11ada15e5473970a0d8a8427c810fea0b139ad97f1e1"} pod="openshift-machine-config-operator/machine-config-daemon-c244z" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.818116 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c244z" podUID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerName="machine-config-daemon" containerID="cri-o://0b78a3d88e45e4d2a23e11ada15e5473970a0d8a8427c810fea0b139ad97f1e1" gracePeriod=600 Dec 04 10:15:00 crc kubenswrapper[4707]: I1204 10:15:00.893166 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8"] Dec 04 10:15:01 crc kubenswrapper[4707]: I1204 10:15:01.088442 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" event={"ID":"dc0d3be4-db60-4d28-ba62-debad8df513b","Type":"ContainerStarted","Data":"6bf6a12f8980afbc13466cfcb7e5208d6c566aebd5aa98ff05a195ae11524771"} Dec 04 10:15:01 crc 
kubenswrapper[4707]: I1204 10:15:01.088495 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" event={"ID":"dc0d3be4-db60-4d28-ba62-debad8df513b","Type":"ContainerStarted","Data":"d08109ec91ac181cf1588368aa81563854a65bd28f2d835e4fcb5d6aafcb82d3"} Dec 04 10:15:01 crc kubenswrapper[4707]: I1204 10:15:01.098951 4707 generic.go:334] "Generic (PLEG): container finished" podID="e64897e0-4162-4aa8-9c13-8a4262a3ca3d" containerID="0b78a3d88e45e4d2a23e11ada15e5473970a0d8a8427c810fea0b139ad97f1e1" exitCode=0 Dec 04 10:15:01 crc kubenswrapper[4707]: I1204 10:15:01.099001 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerDied","Data":"0b78a3d88e45e4d2a23e11ada15e5473970a0d8a8427c810fea0b139ad97f1e1"} Dec 04 10:15:01 crc kubenswrapper[4707]: I1204 10:15:01.099032 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c244z" event={"ID":"e64897e0-4162-4aa8-9c13-8a4262a3ca3d","Type":"ContainerStarted","Data":"d6d7553ce24871adc254b26353eb698d49ce833a05df8e2c7676fa8f248f7b64"} Dec 04 10:15:01 crc kubenswrapper[4707]: I1204 10:15:01.099050 4707 scope.go:117] "RemoveContainer" containerID="c047b6f3f2a44133a33f5010666c795ec84a210be272973a7ce6dc5e5fae1ed8" Dec 04 10:15:01 crc kubenswrapper[4707]: I1204 10:15:01.106530 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" podStartSLOduration=1.106507662 podStartE2EDuration="1.106507662s" podCreationTimestamp="2025-12-04 10:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 10:15:01.102448505 +0000 UTC m=+2200.538271032" watchObservedRunningTime="2025-12-04 10:15:01.106507662 +0000 UTC m=+2200.542330169" Dec 04 10:15:02 crc kubenswrapper[4707]: I1204 10:15:02.105766 4707 generic.go:334] "Generic (PLEG): container finished" podID="dc0d3be4-db60-4d28-ba62-debad8df513b" containerID="6bf6a12f8980afbc13466cfcb7e5208d6c566aebd5aa98ff05a195ae11524771" exitCode=0 Dec 04 10:15:02 crc kubenswrapper[4707]: I1204 10:15:02.105869 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" event={"ID":"dc0d3be4-db60-4d28-ba62-debad8df513b","Type":"ContainerDied","Data":"6bf6a12f8980afbc13466cfcb7e5208d6c566aebd5aa98ff05a195ae11524771"} Dec 04 10:15:03 crc kubenswrapper[4707]: I1204 10:15:03.350216 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:03 crc kubenswrapper[4707]: I1204 10:15:03.456891 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8v54q\" (UniqueName: \"kubernetes.io/projected/dc0d3be4-db60-4d28-ba62-debad8df513b-kube-api-access-8v54q\") pod \"dc0d3be4-db60-4d28-ba62-debad8df513b\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " Dec 04 10:15:03 crc kubenswrapper[4707]: I1204 10:15:03.457048 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dc0d3be4-db60-4d28-ba62-debad8df513b-secret-volume\") pod \"dc0d3be4-db60-4d28-ba62-debad8df513b\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " Dec 04 10:15:03 crc kubenswrapper[4707]: I1204 10:15:03.457093 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dc0d3be4-db60-4d28-ba62-debad8df513b-config-volume\") pod \"dc0d3be4-db60-4d28-ba62-debad8df513b\" (UID: \"dc0d3be4-db60-4d28-ba62-debad8df513b\") " Dec 04 10:15:03 crc kubenswrapper[4707]: I1204 10:15:03.457929 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc0d3be4-db60-4d28-ba62-debad8df513b-config-volume" (OuterVolumeSpecName: "config-volume") pod "dc0d3be4-db60-4d28-ba62-debad8df513b" (UID: "dc0d3be4-db60-4d28-ba62-debad8df513b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 10:15:03 crc kubenswrapper[4707]: I1204 10:15:03.481073 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc0d3be4-db60-4d28-ba62-debad8df513b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "dc0d3be4-db60-4d28-ba62-debad8df513b" (UID: "dc0d3be4-db60-4d28-ba62-debad8df513b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 10:15:03 crc kubenswrapper[4707]: I1204 10:15:03.481073 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc0d3be4-db60-4d28-ba62-debad8df513b-kube-api-access-8v54q" (OuterVolumeSpecName: "kube-api-access-8v54q") pod "dc0d3be4-db60-4d28-ba62-debad8df513b" (UID: "dc0d3be4-db60-4d28-ba62-debad8df513b"). InnerVolumeSpecName "kube-api-access-8v54q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:15:03 crc kubenswrapper[4707]: I1204 10:15:03.558701 4707 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dc0d3be4-db60-4d28-ba62-debad8df513b-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 10:15:03 crc kubenswrapper[4707]: I1204 10:15:03.558755 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8v54q\" (UniqueName: \"kubernetes.io/projected/dc0d3be4-db60-4d28-ba62-debad8df513b-kube-api-access-8v54q\") on node \"crc\" DevicePath \"\"" Dec 04 10:15:03 crc kubenswrapper[4707]: I1204 10:15:03.558768 4707 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dc0d3be4-db60-4d28-ba62-debad8df513b-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 10:15:04 crc kubenswrapper[4707]: I1204 10:15:04.120884 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" event={"ID":"dc0d3be4-db60-4d28-ba62-debad8df513b","Type":"ContainerDied","Data":"d08109ec91ac181cf1588368aa81563854a65bd28f2d835e4fcb5d6aafcb82d3"} Dec 04 10:15:04 crc kubenswrapper[4707]: I1204 10:15:04.121220 4707 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d08109ec91ac181cf1588368aa81563854a65bd28f2d835e4fcb5d6aafcb82d3" Dec 04 10:15:04 crc kubenswrapper[4707]: I1204 10:15:04.121280 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414055-xb2r8" Dec 04 10:15:04 crc kubenswrapper[4707]: I1204 10:15:04.415859 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k"] Dec 04 10:15:04 crc kubenswrapper[4707]: I1204 10:15:04.421105 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414010-ddr4k"] Dec 04 10:15:04 crc kubenswrapper[4707]: I1204 10:15:04.853620 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1" path="/var/lib/kubelet/pods/109f779b-e5dc-4c5c-910b-5ddc2c9ec3e1/volumes" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.654118 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2g9gl"] Dec 04 10:15:20 crc kubenswrapper[4707]: E1204 10:15:20.655163 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc0d3be4-db60-4d28-ba62-debad8df513b" containerName="collect-profiles" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.655182 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc0d3be4-db60-4d28-ba62-debad8df513b" containerName="collect-profiles" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.655317 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc0d3be4-db60-4d28-ba62-debad8df513b" containerName="collect-profiles" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.656110 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.670172 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2g9gl"] Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.788505 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-utilities\") pod \"certified-operators-2g9gl\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.788636 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvq2s\" (UniqueName: \"kubernetes.io/projected/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-kube-api-access-jvq2s\") pod \"certified-operators-2g9gl\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.788677 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-catalog-content\") pod \"certified-operators-2g9gl\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.889577 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvq2s\" (UniqueName: \"kubernetes.io/projected/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-kube-api-access-jvq2s\") pod \"certified-operators-2g9gl\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.889637 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-catalog-content\") pod \"certified-operators-2g9gl\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.889668 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-utilities\") pod \"certified-operators-2g9gl\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.890150 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-utilities\") pod \"certified-operators-2g9gl\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.890171 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-catalog-content\") pod \"certified-operators-2g9gl\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.911036 4707 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jvq2s\" (UniqueName: \"kubernetes.io/projected/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-kube-api-access-jvq2s\") pod \"certified-operators-2g9gl\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:20 crc kubenswrapper[4707]: I1204 10:15:20.975295 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:21 crc kubenswrapper[4707]: I1204 10:15:21.248415 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2g9gl"] Dec 04 10:15:22 crc kubenswrapper[4707]: I1204 10:15:22.229750 4707 generic.go:334] "Generic (PLEG): container finished" podID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" containerID="ec06620782de24084008fc43667db50312d629a1ff89724b18a336691e59fdab" exitCode=0 Dec 04 10:15:22 crc kubenswrapper[4707]: I1204 10:15:22.229795 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2g9gl" event={"ID":"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607","Type":"ContainerDied","Data":"ec06620782de24084008fc43667db50312d629a1ff89724b18a336691e59fdab"} Dec 04 10:15:22 crc kubenswrapper[4707]: I1204 10:15:22.232132 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2g9gl" event={"ID":"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607","Type":"ContainerStarted","Data":"dea9eeed6c348aa81011df1697861d81f94939bf72de6903251018cdb78f95c4"} Dec 04 10:15:22 crc kubenswrapper[4707]: I1204 10:15:22.232141 4707 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 10:15:23 crc kubenswrapper[4707]: I1204 10:15:23.238867 4707 generic.go:334] "Generic (PLEG): container finished" podID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" containerID="9d97bf1b590f95d251ae008e79e1173114e4341105b6c43242a349a939c56f05" exitCode=0 Dec 04 10:15:23 crc kubenswrapper[4707]: I1204 10:15:23.238919 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2g9gl" event={"ID":"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607","Type":"ContainerDied","Data":"9d97bf1b590f95d251ae008e79e1173114e4341105b6c43242a349a939c56f05"} Dec 04 10:15:23 crc kubenswrapper[4707]: I1204 10:15:23.365266 4707 scope.go:117] "RemoveContainer" containerID="d56192023c3786e4a0a996f48568c68fdda895ba4bcf0d3f7268cdda2fe3e9ee" Dec 04 10:15:24 crc kubenswrapper[4707]: I1204 10:15:24.247700 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2g9gl" event={"ID":"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607","Type":"ContainerStarted","Data":"9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f"} Dec 04 10:15:24 crc kubenswrapper[4707]: I1204 10:15:24.265934 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2g9gl" podStartSLOduration=2.866393227 podStartE2EDuration="4.265913365s" podCreationTimestamp="2025-12-04 10:15:20 +0000 UTC" firstStartedPulling="2025-12-04 10:15:22.231795521 +0000 UTC m=+2221.667618028" lastFinishedPulling="2025-12-04 10:15:23.631315659 +0000 UTC m=+2223.067138166" observedRunningTime="2025-12-04 10:15:24.26352095 +0000 UTC m=+2223.699343457" watchObservedRunningTime="2025-12-04 10:15:24.265913365 +0000 UTC m=+2223.701735872" Dec 04 10:15:30 crc kubenswrapper[4707]: I1204 10:15:30.975847 4707 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:30 crc kubenswrapper[4707]: I1204 10:15:30.977893 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:31 crc kubenswrapper[4707]: I1204 10:15:31.023746 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:31 crc kubenswrapper[4707]: I1204 10:15:31.326151 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:31 crc kubenswrapper[4707]: I1204 10:15:31.366924 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2g9gl"] Dec 04 10:15:33 crc kubenswrapper[4707]: I1204 10:15:33.294741 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2g9gl" podUID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" containerName="registry-server" containerID="cri-o://9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f" gracePeriod=2 Dec 04 10:15:33 crc kubenswrapper[4707]: I1204 10:15:33.646488 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:33 crc kubenswrapper[4707]: I1204 10:15:33.764968 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-catalog-content\") pod \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " Dec 04 10:15:33 crc kubenswrapper[4707]: I1204 10:15:33.772059 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-utilities\") pod \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " Dec 04 10:15:33 crc kubenswrapper[4707]: I1204 10:15:33.772120 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvq2s\" (UniqueName: \"kubernetes.io/projected/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-kube-api-access-jvq2s\") pod \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\" (UID: \"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607\") " Dec 04 10:15:33 crc kubenswrapper[4707]: I1204 10:15:33.772849 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-utilities" (OuterVolumeSpecName: "utilities") pod "f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" (UID: "f9a7c0a3-84ad-47c3-82b8-0c696fdfc607"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:15:33 crc kubenswrapper[4707]: I1204 10:15:33.777918 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-kube-api-access-jvq2s" (OuterVolumeSpecName: "kube-api-access-jvq2s") pod "f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" (UID: "f9a7c0a3-84ad-47c3-82b8-0c696fdfc607"). InnerVolumeSpecName "kube-api-access-jvq2s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:15:33 crc kubenswrapper[4707]: I1204 10:15:33.814848 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" (UID: "f9a7c0a3-84ad-47c3-82b8-0c696fdfc607"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:15:33 crc kubenswrapper[4707]: I1204 10:15:33.874092 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 10:15:33 crc kubenswrapper[4707]: I1204 10:15:33.874120 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvq2s\" (UniqueName: \"kubernetes.io/projected/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-kube-api-access-jvq2s\") on node \"crc\" DevicePath \"\"" Dec 04 10:15:33 crc kubenswrapper[4707]: I1204 10:15:33.874131 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.301284 4707 generic.go:334] "Generic (PLEG): container finished" podID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" containerID="9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f" exitCode=0 Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.301355 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2g9gl" event={"ID":"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607","Type":"ContainerDied","Data":"9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f"} Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.301424 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2g9gl" event={"ID":"f9a7c0a3-84ad-47c3-82b8-0c696fdfc607","Type":"ContainerDied","Data":"dea9eeed6c348aa81011df1697861d81f94939bf72de6903251018cdb78f95c4"} Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.301448 4707 scope.go:117] "RemoveContainer" containerID="9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f" Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.301382 4707 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2g9gl" Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.318472 4707 scope.go:117] "RemoveContainer" containerID="9d97bf1b590f95d251ae008e79e1173114e4341105b6c43242a349a939c56f05" Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.340896 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2g9gl"] Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.346293 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2g9gl"] Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.347043 4707 scope.go:117] "RemoveContainer" containerID="ec06620782de24084008fc43667db50312d629a1ff89724b18a336691e59fdab" Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.361552 4707 scope.go:117] "RemoveContainer" containerID="9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f" Dec 04 10:15:34 crc kubenswrapper[4707]: E1204 10:15:34.362058 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f\": container with ID starting with 9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f not found: ID does not exist" containerID="9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f" Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.362109 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f"} err="failed to get container status \"9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f\": rpc error: code = NotFound desc = could not find container \"9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f\": container with ID starting with 9b177548c5e30e47422dc99e25e3835d12607effce2a9a07a8ac4b359d03870f not found: ID does not exist" Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.362148 4707 scope.go:117] "RemoveContainer" containerID="9d97bf1b590f95d251ae008e79e1173114e4341105b6c43242a349a939c56f05" Dec 04 10:15:34 crc kubenswrapper[4707]: E1204 10:15:34.362725 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d97bf1b590f95d251ae008e79e1173114e4341105b6c43242a349a939c56f05\": container with ID starting with 9d97bf1b590f95d251ae008e79e1173114e4341105b6c43242a349a939c56f05 not found: ID does not exist" containerID="9d97bf1b590f95d251ae008e79e1173114e4341105b6c43242a349a939c56f05" Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.362766 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d97bf1b590f95d251ae008e79e1173114e4341105b6c43242a349a939c56f05"} err="failed to get container status \"9d97bf1b590f95d251ae008e79e1173114e4341105b6c43242a349a939c56f05\": rpc error: code = NotFound desc = could not find container \"9d97bf1b590f95d251ae008e79e1173114e4341105b6c43242a349a939c56f05\": container with ID starting with 9d97bf1b590f95d251ae008e79e1173114e4341105b6c43242a349a939c56f05 not found: ID does not exist" Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.362793 4707 scope.go:117] "RemoveContainer" containerID="ec06620782de24084008fc43667db50312d629a1ff89724b18a336691e59fdab" Dec 04 10:15:34 crc kubenswrapper[4707]: E1204 10:15:34.363304 4707 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ec06620782de24084008fc43667db50312d629a1ff89724b18a336691e59fdab\": container with ID starting with ec06620782de24084008fc43667db50312d629a1ff89724b18a336691e59fdab not found: ID does not exist" containerID="ec06620782de24084008fc43667db50312d629a1ff89724b18a336691e59fdab" Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.363354 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec06620782de24084008fc43667db50312d629a1ff89724b18a336691e59fdab"} err="failed to get container status \"ec06620782de24084008fc43667db50312d629a1ff89724b18a336691e59fdab\": rpc error: code = NotFound desc = could not find container \"ec06620782de24084008fc43667db50312d629a1ff89724b18a336691e59fdab\": container with ID starting with ec06620782de24084008fc43667db50312d629a1ff89724b18a336691e59fdab not found: ID does not exist" Dec 04 10:15:34 crc kubenswrapper[4707]: I1204 10:15:34.856107 4707 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" path="/var/lib/kubelet/pods/f9a7c0a3-84ad-47c3-82b8-0c696fdfc607/volumes" Dec 04 10:15:41 crc kubenswrapper[4707]: I1204 10:15:41.872591 4707 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-692kp"] Dec 04 10:15:41 crc kubenswrapper[4707]: E1204 10:15:41.873464 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" containerName="extract-content" Dec 04 10:15:41 crc kubenswrapper[4707]: I1204 10:15:41.873486 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" containerName="extract-content" Dec 04 10:15:41 crc kubenswrapper[4707]: E1204 10:15:41.873496 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" containerName="registry-server" Dec 04 10:15:41 crc kubenswrapper[4707]: I1204 10:15:41.873501 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" containerName="registry-server" Dec 04 10:15:41 crc kubenswrapper[4707]: E1204 10:15:41.873517 4707 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" containerName="extract-utilities" Dec 04 10:15:41 crc kubenswrapper[4707]: I1204 10:15:41.873524 4707 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" containerName="extract-utilities" Dec 04 10:15:41 crc kubenswrapper[4707]: I1204 10:15:41.873616 4707 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a7c0a3-84ad-47c3-82b8-0c696fdfc607" containerName="registry-server" Dec 04 10:15:41 crc kubenswrapper[4707]: I1204 10:15:41.875514 4707 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:41 crc kubenswrapper[4707]: I1204 10:15:41.878656 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-692kp"] Dec 04 10:15:41 crc kubenswrapper[4707]: I1204 10:15:41.904121 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-utilities\") pod \"redhat-marketplace-692kp\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:41 crc kubenswrapper[4707]: I1204 10:15:41.904198 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-catalog-content\") pod \"redhat-marketplace-692kp\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:41 crc kubenswrapper[4707]: I1204 10:15:41.904272 4707 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4t9c\" (UniqueName: \"kubernetes.io/projected/ba557e0e-a55c-459f-b696-7cf569af93ec-kube-api-access-z4t9c\") pod \"redhat-marketplace-692kp\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:42 crc kubenswrapper[4707]: I1204 10:15:42.005058 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-catalog-content\") pod \"redhat-marketplace-692kp\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:42 crc kubenswrapper[4707]: I1204 10:15:42.005170 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4t9c\" (UniqueName: \"kubernetes.io/projected/ba557e0e-a55c-459f-b696-7cf569af93ec-kube-api-access-z4t9c\") pod \"redhat-marketplace-692kp\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:42 crc kubenswrapper[4707]: I1204 10:15:42.005199 4707 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-utilities\") pod \"redhat-marketplace-692kp\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:42 crc kubenswrapper[4707]: I1204 10:15:42.005639 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-catalog-content\") pod \"redhat-marketplace-692kp\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:42 crc kubenswrapper[4707]: I1204 10:15:42.005662 4707 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-utilities\") pod \"redhat-marketplace-692kp\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:42 crc kubenswrapper[4707]: I1204 10:15:42.025371 4707 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-z4t9c\" (UniqueName: \"kubernetes.io/projected/ba557e0e-a55c-459f-b696-7cf569af93ec-kube-api-access-z4t9c\") pod \"redhat-marketplace-692kp\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:42 crc kubenswrapper[4707]: I1204 10:15:42.191554 4707 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:42 crc kubenswrapper[4707]: I1204 10:15:42.442463 4707 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-692kp"] Dec 04 10:15:43 crc kubenswrapper[4707]: I1204 10:15:43.356259 4707 generic.go:334] "Generic (PLEG): container finished" podID="ba557e0e-a55c-459f-b696-7cf569af93ec" containerID="fdcfc581692923b023f7320e9791366e2e2ecc94787354e265d655ed6684632c" exitCode=0 Dec 04 10:15:43 crc kubenswrapper[4707]: I1204 10:15:43.356371 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-692kp" event={"ID":"ba557e0e-a55c-459f-b696-7cf569af93ec","Type":"ContainerDied","Data":"fdcfc581692923b023f7320e9791366e2e2ecc94787354e265d655ed6684632c"} Dec 04 10:15:43 crc kubenswrapper[4707]: I1204 10:15:43.356847 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-692kp" event={"ID":"ba557e0e-a55c-459f-b696-7cf569af93ec","Type":"ContainerStarted","Data":"7ac765340726d05c6e6a30890a7373908a0ae5f29224dc02bddd82b02e1d83e9"} Dec 04 10:15:44 crc kubenswrapper[4707]: I1204 10:15:44.363305 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-692kp" event={"ID":"ba557e0e-a55c-459f-b696-7cf569af93ec","Type":"ContainerStarted","Data":"e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43"} Dec 04 10:15:45 crc kubenswrapper[4707]: I1204 10:15:45.372589 4707 generic.go:334] "Generic (PLEG): container finished" podID="ba557e0e-a55c-459f-b696-7cf569af93ec" containerID="e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43" exitCode=0 Dec 04 10:15:45 crc kubenswrapper[4707]: I1204 10:15:45.372718 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-692kp" event={"ID":"ba557e0e-a55c-459f-b696-7cf569af93ec","Type":"ContainerDied","Data":"e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43"} Dec 04 10:15:46 crc kubenswrapper[4707]: I1204 10:15:46.380697 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-692kp" event={"ID":"ba557e0e-a55c-459f-b696-7cf569af93ec","Type":"ContainerStarted","Data":"959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d"} Dec 04 10:15:46 crc kubenswrapper[4707]: I1204 10:15:46.405667 4707 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-692kp" podStartSLOduration=2.966352643 podStartE2EDuration="5.405646455s" podCreationTimestamp="2025-12-04 10:15:41 +0000 UTC" firstStartedPulling="2025-12-04 10:15:43.358197048 +0000 UTC m=+2242.794019555" lastFinishedPulling="2025-12-04 10:15:45.79749086 +0000 UTC m=+2245.233313367" observedRunningTime="2025-12-04 10:15:46.400393139 +0000 UTC m=+2245.836215646" watchObservedRunningTime="2025-12-04 10:15:46.405646455 +0000 UTC m=+2245.841468962" Dec 04 10:15:52 crc kubenswrapper[4707]: I1204 10:15:52.193862 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:52 crc kubenswrapper[4707]: I1204 10:15:52.194363 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:52 crc kubenswrapper[4707]: I1204 10:15:52.252963 4707 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:52 crc kubenswrapper[4707]: I1204 10:15:52.458777 4707 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:52 crc kubenswrapper[4707]: I1204 10:15:52.500942 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-692kp"] Dec 04 10:15:54 crc kubenswrapper[4707]: I1204 10:15:54.429570 4707 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-692kp" podUID="ba557e0e-a55c-459f-b696-7cf569af93ec" containerName="registry-server" containerID="cri-o://959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d" gracePeriod=2 Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.262841 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.372139 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-utilities\") pod \"ba557e0e-a55c-459f-b696-7cf569af93ec\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.372280 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-catalog-content\") pod \"ba557e0e-a55c-459f-b696-7cf569af93ec\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.372420 4707 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4t9c\" (UniqueName: \"kubernetes.io/projected/ba557e0e-a55c-459f-b696-7cf569af93ec-kube-api-access-z4t9c\") pod \"ba557e0e-a55c-459f-b696-7cf569af93ec\" (UID: \"ba557e0e-a55c-459f-b696-7cf569af93ec\") " Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.373976 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-utilities" (OuterVolumeSpecName: "utilities") pod "ba557e0e-a55c-459f-b696-7cf569af93ec" (UID: "ba557e0e-a55c-459f-b696-7cf569af93ec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.384958 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba557e0e-a55c-459f-b696-7cf569af93ec-kube-api-access-z4t9c" (OuterVolumeSpecName: "kube-api-access-z4t9c") pod "ba557e0e-a55c-459f-b696-7cf569af93ec" (UID: "ba557e0e-a55c-459f-b696-7cf569af93ec"). InnerVolumeSpecName "kube-api-access-z4t9c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.395819 4707 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba557e0e-a55c-459f-b696-7cf569af93ec" (UID: "ba557e0e-a55c-459f-b696-7cf569af93ec"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.436752 4707 generic.go:334] "Generic (PLEG): container finished" podID="ba557e0e-a55c-459f-b696-7cf569af93ec" containerID="959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d" exitCode=0 Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.436804 4707 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-692kp" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.436816 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-692kp" event={"ID":"ba557e0e-a55c-459f-b696-7cf569af93ec","Type":"ContainerDied","Data":"959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d"} Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.436997 4707 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-692kp" event={"ID":"ba557e0e-a55c-459f-b696-7cf569af93ec","Type":"ContainerDied","Data":"7ac765340726d05c6e6a30890a7373908a0ae5f29224dc02bddd82b02e1d83e9"} Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.437019 4707 scope.go:117] "RemoveContainer" containerID="959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.461399 4707 scope.go:117] "RemoveContainer" containerID="e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.473695 4707 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4t9c\" (UniqueName: \"kubernetes.io/projected/ba557e0e-a55c-459f-b696-7cf569af93ec-kube-api-access-z4t9c\") on node \"crc\" DevicePath \"\"" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.473738 4707 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.473752 4707 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba557e0e-a55c-459f-b696-7cf569af93ec-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.474613 4707 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-692kp"] Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.479116 4707 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-692kp"] Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.502316 4707 scope.go:117] "RemoveContainer" containerID="fdcfc581692923b023f7320e9791366e2e2ecc94787354e265d655ed6684632c" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.516000 4707 scope.go:117] "RemoveContainer" containerID="959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d" Dec 04 10:15:55 crc kubenswrapper[4707]: E1204 10:15:55.516329 4707 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d\": container with ID starting with 959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d not found: ID does not exist" containerID="959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.516393 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d"} err="failed to get container status \"959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d\": rpc error: code = NotFound desc = could not find container \"959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d\": container with ID starting with 959f1721736fe76b8fbe614c3424b5162cad657e1f8a1875d4de727a3341517d not found: ID does not exist" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.516419 4707 scope.go:117] "RemoveContainer" containerID="e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43" Dec 04 10:15:55 crc kubenswrapper[4707]: E1204 10:15:55.516712 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43\": container with ID starting with e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43 not found: ID does not exist" containerID="e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.516756 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43"} err="failed to get container status \"e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43\": rpc error: code = NotFound desc = could not find container \"e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43\": container with ID starting with e20ed12194932955d2784ef8a52414cc5c671c3b5e1bc807fefd1490975a0e43 not found: ID does not exist" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.516774 4707 scope.go:117] "RemoveContainer" containerID="fdcfc581692923b023f7320e9791366e2e2ecc94787354e265d655ed6684632c" Dec 04 10:15:55 crc kubenswrapper[4707]: E1204 10:15:55.517115 4707 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdcfc581692923b023f7320e9791366e2e2ecc94787354e265d655ed6684632c\": container with ID starting with fdcfc581692923b023f7320e9791366e2e2ecc94787354e265d655ed6684632c not found: ID does not exist" containerID="fdcfc581692923b023f7320e9791366e2e2ecc94787354e265d655ed6684632c" Dec 04 10:15:55 crc kubenswrapper[4707]: I1204 10:15:55.517140 4707 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdcfc581692923b023f7320e9791366e2e2ecc94787354e265d655ed6684632c"} err="failed to get container status \"fdcfc581692923b023f7320e9791366e2e2ecc94787354e265d655ed6684632c\": rpc error: code = NotFound desc = could not find container \"fdcfc581692923b023f7320e9791366e2e2ecc94787354e265d655ed6684632c\": container with ID starting with fdcfc581692923b023f7320e9791366e2e2ecc94787354e265d655ed6684632c not found: ID does not exist" Dec 04 10:15:56 crc kubenswrapper[4707]: I1204 10:15:56.855986 4707 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="ba557e0e-a55c-459f-b696-7cf569af93ec" path="/var/lib/kubelet/pods/ba557e0e-a55c-459f-b696-7cf569af93ec/volumes"
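Note on the pod_startup_latency_tracker entries above: for both certified-operators-2g9gl and redhat-marketplace-692kp the logged podStartSLOduration appears to be podStartE2EDuration minus the image-pull window (lastFinishedPulling - firstStartedPulling). A minimal sketch that reproduces the arithmetic from the values logged for certified-operators-2g9gl; the variable names are illustrative, only the numbers come from the log:

# Values copied from the pod_startup_latency_tracker entry for
# openshift-marketplace/certified-operators-2g9gl (seconds; m=+... monotonic offsets).
pod_start_e2e = 4.265913365             # podStartE2EDuration
first_started_pulling = 2221.667618028  # firstStartedPulling, m=+2221.667618028
last_finished_pulling = 2223.067138166  # lastFinishedPulling, m=+2223.067138166

image_pull = last_finished_pulling - first_started_pulling
slo_duration = pod_start_e2e - image_pull
print(f"image pull {image_pull:.9f}s, startup excluding pull {slo_duration:.9f}s")
# Prints 1.399520138s and 2.866393227s; the latter matches the logged
# podStartSLOduration=2.866393227 for this pod.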